hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
34bbafd4c9930c0faccaa0114904fc2722169c13
| 778
|
py
|
Python
|
manage.py
|
YaroslavChyhryn/SchoolAPI
|
6b5eb4e1faf6b962561109fc227057ad0f8d4d92
|
[
"MIT"
] | null | null | null |
manage.py
|
YaroslavChyhryn/SchoolAPI
|
6b5eb4e1faf6b962561109fc227057ad0f8d4d92
|
[
"MIT"
] | null | null | null |
manage.py
|
YaroslavChyhryn/SchoolAPI
|
6b5eb4e1faf6b962561109fc227057ad0f8d4d92
|
[
"MIT"
] | null | null | null |
from flask_script import Manager, prompt_bool
# from flask_migrate import Migrate, MigrateCommand
from school_api.app import create_app
from school_api.db import create_tables, drop_tables
from school_api.data_generator import test_db

"""
Refused flask_migration because it was overkill for this project
"""

# Build the Flask app and wrap it in a flask_script CLI manager; each
# @manager.command below becomes a sub-command of `python manage.py`.
app = create_app()
# migrate = Migrate(app, db)
manager = Manager(app)
# manager.add_command('db', MigrateCommand)


@manager.command
def createtables():
    """Drop any existing tables and recreate the schema from scratch."""
    drop_tables(app)
    create_tables(app)


@manager.command
def testdb():
    """Rebuild the schema and populate it with generated test data."""
    drop_tables(app)
    create_tables(app)
    test_db(app)


@manager.command
def droptables():
    """Drop all tables, gated behind an interactive yes/no prompt."""
    # prompt_bool reads stdin; the destructive drop only runs on confirmation.
    if prompt_bool("Are you sure you want to lose all your data"):
        drop_tables(app)


if __name__ == '__main__':
    # Dispatch to the sub-command named on the command line.
    manager.run()
| 20.473684
| 66
| 0.746787
| 110
| 778
| 5.018182
| 0.418182
| 0.081522
| 0.070652
| 0.068841
| 0.101449
| 0.101449
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161954
| 778
| 37
| 67
| 21.027027
| 0.846626
| 0.151671
| 0
| 0.380952
| 0
| 0
| 0.087329
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.190476
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34bcda748e6f244af235e4cdcc2cf69df9e0d4a6
| 2,512
|
py
|
Python
|
info_modules/custom/example/layer_info.py
|
HusseinKabbout/qwc-feature-info-service
|
3d7cdbc1a3dc4a3725ba0529204848d47c4ed87e
|
[
"MIT"
] | null | null | null |
info_modules/custom/example/layer_info.py
|
HusseinKabbout/qwc-feature-info-service
|
3d7cdbc1a3dc4a3725ba0529204848d47c4ed87e
|
[
"MIT"
] | null | null | null |
info_modules/custom/example/layer_info.py
|
HusseinKabbout/qwc-feature-info-service
|
3d7cdbc1a3dc4a3725ba0529204848d47c4ed87e
|
[
"MIT"
] | 2
|
2020-03-24T09:13:14.000Z
|
2021-09-29T10:43:31.000Z
|
# Sample implementation of a custom layer info module
def layer_info(layer, x, y, crs, params, identity):
    """Query layer and return info result as dict.

    This sample implementation ignores the actual data source and returns a
    single synthetic feature centred on the (rounded) query point, with a
    100x100 bounding box around it.

    Result shape:
        {
            'features': [
                {
                    'id': <feature ID>,                        # optional
                    'attributes': [{'name': ..., 'value': ...}],
                    'bbox': [<minx>, <miny>, <maxx>, <maxy>],  # optional
                    'geometry': '<WKT geometry>'               # optional
                }
            ]
        }

    :param str layer: Layer name
    :param float x: X coordinate of query
    :param float y: Y coordinate of query
    :param str crs: CRS of query coordinates
    :param obj params: FeatureInfo service params, e.g. 'i', 'j', 'height',
        'width', 'bbox', 'crs', 'feature_count', 'with_geometry',
        'with_maptip', 'FI_POINT_TOLERANCE', 'FI_LINE_TOLERANCE',
        'FI_POLYGON_TOLERANCE', 'resolution'
    :param str identity: User name or Identity dict
    """
    # Snap the query point to integer coordinates.
    px = round(x)
    py = round(y)

    demo_feature = {
        'id': 123,
        'attributes': [
            {
                'name': 'title',
                'value': 'Feature for Layer %s' % layer
            },
            {
                'name': 'name',
                'value': 'Feature Name'
            }
        ],
        # 100x100 box centred on the query point.
        'bbox': [px - 50, py - 50, px + 50, py + 50],
        'geometry': "POINT(%s %s)" % (px, py)
    }

    return {
        'features': [demo_feature]
    }
| 31.797468
| 78
| 0.48328
| 255
| 2,512
| 4.713725
| 0.364706
| 0.046589
| 0.052413
| 0.069884
| 0.086522
| 0.054908
| 0.054908
| 0.054908
| 0
| 0
| 0
| 0.011311
| 0.401672
| 2,512
| 78
| 79
| 32.205128
| 0.788423
| 0.680732
| 0
| 0
| 0
| 0
| 0.164274
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34bce8f103a1242d4cbbb176bc3c65328694b160
| 20,740
|
py
|
Python
|
integration/gCalIntegration.py
|
conzty01/RA_Scheduler
|
6bf4931871aef4058d93917e62ceb31766e06b3a
|
[
"MIT"
] | 1
|
2021-03-31T05:26:17.000Z
|
2021-03-31T05:26:17.000Z
|
integration/gCalIntegration.py
|
conzty01/RA_Scheduler
|
6bf4931871aef4058d93917e62ceb31766e06b3a
|
[
"MIT"
] | 83
|
2018-03-19T18:32:34.000Z
|
2022-02-01T02:15:01.000Z
|
integration/gCalIntegration.py
|
conzty01/RA_Scheduler
|
6bf4931871aef4058d93917e62ceb31766e06b3a
|
[
"MIT"
] | 2
|
2021-01-15T22:16:00.000Z
|
2021-02-10T01:03:32.000Z
|
from google.auth.transport.requests import Request
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
import google_auth_oauthlib.flow
import logging
import os
class gCalIntegratinator:
    """ Object for handling interactions between RADSA and Google Calendar API.

    This class uses the googleapiclient to interact with the Google Calendar API.

    AUTHORIZATION WORKFLOW:
        1) Redirect user to the Authorization URL
        2) User consents to Google Calendar integration and is
           redirected back to this application
        3) The Authorization Response and State are returned from Google
           and used to generate user credentials
        4) User credentials are returned back to the application where they
           are stored in the DB for later use

    Method Return Statuses:
        -5: Error Creating Google Calendar Event
        -4: Invalid Calendar Id
        -3: Invalid Credentials Received
        -2: Need to Renew Credentials
        -1: Unknown Error Occurred
         0: Credentials are valid but needed refresh
         1: Success

    Args:
        scopes (lst): A list containing the scopes required to interact with the
                      Google Calendar API. The default provided are
                       - .../auth/calendar.calendarlist.readonly
                       - .../auth/calendar.app.created
    """

    SCOPES = ['https://www.googleapis.com/auth/calendar.app.created',
              'https://www.googleapis.com/auth/calendar.calendarlist.readonly']

    def __init__(self, scopes=SCOPES):
        logging.debug("Creating gCalIntegratinator Object")

        # Name of Google service being used
        self.serviceName = "calendar"

        # API version number of Google service being used
        self.serviceVersion = "v3"

        # Set the scopes for reference
        self.scopes = scopes

        # Load the app credentials from the environment
        self.__appCreds = self._getCredsFromEnv()

        # Generate the oAuth2 flow for handling the client/app authentication
        self.flow = google_auth_oauthlib.flow.Flow.from_client_config(
            self.__appCreds, scopes=scopes)

    def _getCredsFromEnv(self):
        """Assemble and return the Google 'web' app client config from the environment.

        This will return a deserialized JSON object that is assembled per
        Google's specifications. This object will be configured for a 'web' app.

        This does assume the following parameters are available in the environment:
           CLIENT_ID
           PROJECT_ID
           AUTH_URI
           TOKEN_URI
           AUTH_PROVIDER_X509_CERT_URL
           CLIENT_SECRET
           REDIRECT_URIS -> This should be the urls separated by a ',' only
           JAVASCRIPT_ORIGINS -> This should be the urls separated by a ',' only
        """
        logging.info("Loading app settings from environment")

        return {
            "web": {
                "client_id": os.environ["CLIENT_ID"],
                "project_id": os.environ["PROJECT_ID"],
                "auth_uri": os.environ["AUTH_URI"],
                "token_uri": os.environ["TOKEN_URI"],
                "auth_provider_x509_cert_url": os.environ["AUTH_PROVIDER_X509_CERT_URL"],
                "client_secret": os.environ["CLIENT_SECRET"],
                "redirect_uris": [entry for entry in os.environ["REDIRECT_URIS"].split(",")],
                "javascript_origins": [entry for entry in os.environ["JAVASCRIPT_ORIGINS"].split(",")]
            }
        }

    def _validateCredentials(self, creds):
        """Validate (and refresh, if possible) the provided client credentials.

        Outcomes:
          - Credentials valid (possibly after a silent refresh): creds returned.
          - Credentials not a credentials object: InvalidCalendarCredentialsError.
          - Credentials expired with no refresh token: ExpiredCalendarCredentialsError.
          - Anything else: UnexpectedError wrapping the underlying exception.
        """
        logging.debug("Checking Credentials")

        try:
            # Are the credentials invalid?
            if not creds.valid:
                # If the credentials are expired and can be refreshed,
                #  then refresh the credentials
                if creds.expired and creds.refresh_token:
                    creds.refresh(Request())
                else:
                    # Otherwise we will need to prompt the user to log in
                    #  and approve integration again.
                    logging.debug("Manual Credential Refresh Required")
                    raise self.ExpiredCalendarCredentialsError("Manual Credential Refresh Required")
            else:
                # If the credentials are valid, return successful
                logging.debug("Credentials Valid")

        except AttributeError:
            # If we receive an AttributeError, then we did not receive the expected
            #  credentials object.

            # Log the occurrence
            logging.info("Invalid Credentials Received")

            # Raise an InvalidCalendarCredentialsError
            raise self.InvalidCalendarCredentialsError("Invalid Credentials Received")

        except self.ExpiredCalendarCredentialsError as e:
            # If we receive an ExpiredCalendarCredentialsError, then simply pass that up to
            #  the calling method.
            raise e

        except Exception as e:
            # If we receive some other, unexpected Exception, then notify the calling method

            # Log the occurrence
            logging.error(str(e))

            # Raise an UnknownError
            raise self.UnexpectedError("Calendar Credential Validation", e, str(e))

        # If we made it this far without raising an exception,
        #  then the credentials are valid.
        return creds

    def generateAuthURL(self, redirect_uri):
        """Generate and return (auth_url, state) for the given redirect_uri."""
        logging.info("Generating Google Authorization URL")

        # Set the flow's redirect_uri
        self.flow.redirect_uri = redirect_uri

        # Return (auth_url, state) for the given redirect_uri
        return self.flow.authorization_url(access_type="offline",
                                           include_granted_scopes="true",
                                           prompt="select_account")

    def handleAuthResponse(self, auth_response, redirect_uri):
        """Exchange the authorization response for client credentials and return them."""
        logging.info("Generating Google Client Credentials")

        self.flow.redirect_uri = redirect_uri

        # Generate the token
        self.flow.fetch_token(authorization_response=auth_response)

        return self.flow.credentials

    def createGoogleCalendar(self, client_creds):
        """Create a secondary Google Calendar for the duty schedule and return its id.

        Raises CalendarCreationError if the API call fails.
        """
        logging.info("Creating Google Calendar")

        # Check to make sure the credentials are valid
        client_creds = self._validateCredentials(client_creds)

        # Build the Google Calendar service with appropriate version
        service = build(self.serviceName, self.serviceVersion, credentials=client_creds)

        # Create the body of the request to create a new Google Calendar.
        newCalBody = {
            "summary": "RA Duty Schedule",
            "description": "Calendar for the Resident Assistant Duty Schedule.\n\n"
                           "Created and added to by the RA Duty Scheduler Application (RADSA)."
        }

        try:
            # Call the Google Calendar API Service to have the new calendar created
            created_calendar = service.calendars().insert(body=newCalBody).execute()

        except Exception as e:
            # If we received an exception, then wrap it in an CalendarCreationError and
            #  pass that up to the calling function.

            # Log the occurrence
            logging.error("Error encountered when attempting to create Google Calendar: {}".format(str(e)))

            # Raise the CalendarCreationError
            raise self.CalendarCreationError(str(e))

        logging.info("Calendar Creation Complete")

        # Return the ID of the new calendar.
        return created_calendar["id"]

    def exportScheduleToGoogleCalendar(self, client_creds, calendarId, schedule, flaggedDutyLabel):
        """Export the provided schedule to the client's Google Calendar.

        If the calendar identified by calendarId no longer exists, a new one is
        created and used instead. Raises ScheduleExportError if neither locating
        nor creating a calendar succeeds, or if pushing an event fails.
        """
        # Check to make sure the credentials are valid
        client_creds = self._validateCredentials(client_creds)

        # Create the Google Calendar Service
        service = build(self.serviceName, self.serviceVersion, credentials=client_creds)

        # Check to see if the 'RA Duty Schedule' calendar exists. If not, create
        #  the calendar.
        try:
            logging.debug("Verifying that the 'RA Schedule Calendar' exists.")
            res = service.calendarList().get(calendarId=calendarId).execute()
            logging.debug("CalendarList().get() Result: {}".format(res))

        except HttpError as e:
            # An HttpError occurred which could indicate that the calendar no longer exists.
            #  If this is the case, the HttpError would be a 404 error.

            # Log the occurrence of this issue.
            logging.info("'RA Schedule Calendar' not found for client.")
            logging.error(str(e))

            # Plan B is to create a new Google Calendar.
            try:
                # Create the calendar using the client_creds
                calendarId = self.createGoogleCalendar(client_creds)

            except self.CalendarCreationError as subE:
                # An error occurred when attempting to create the Calendar.
                #  Wrap the exception in a ScheduleExportError and raise it
                raise self.ScheduleExportError(subE, "Unable to locate valid Google Calendar.")

        # Once we are able to locate the calendar, start adding the events to it!
        try:
            logging.info("Exporting schedule")

            # Iterate through the schedule
            for duty in schedule:

                # Check to see if this duty should be flagged
                if "flagged" in duty["extendedProps"].keys() and duty["extendedProps"]["flagged"]:
                    # If so, then set the summary and description messages to include the flagged
                    #  duty label.
                    summaryMsg = duty["title"] + " ({})".format(flaggedDutyLabel)
                    descriptionMsg = duty["title"] + " has been assigned for {} duty.".format(flaggedDutyLabel)

                else:
                    # Otherwise, set the summary and description messages to be the default
                    summaryMsg = duty["title"]
                    descriptionMsg = duty["title"] + " has been assigned for duty."

                # Create an Event Object that will handle assembling the event's body for the Google Calendar API
                eb = Event(summaryMsg,
                           descriptionMsg,
                           duty["start"])

                # Call the Google Calendar API to add the event
                service.events().insert(calendarId=calendarId,
                                        body=eb.getBody(),
                                        supportsAttachments=False).execute()

        except HttpError as e:
            # An HttpError could indicate a number of things including a missing calendar or a
            #  Bad Request/malformed data. If this occurs, stop processing and report back to the
            #  server.

            # Log the occurrence
            logging.info("Error encountered while pushing Event: {} to Google Calendar".format(duty["start"]))
            logging.error(str(e))

            # Wrap the exception in a ScheduleExportError and raise it
            raise self.ScheduleExportError(e, "Unable to export schedule to Google Calendar.")

        logging.info("Export complete")

    class BaseGCalIntegratinatorException(Exception):
        """Base exception for all gCalIntegratinator errors."""

        def __init__(self, *args):
            # If args are provided
            if args:
                # Then set the message as the first argument
                self.message = args[0]
                logging.debug("BASE ERROR CREATION: {}".format(args))

            else:
                # Otherwise set the message to None
                self.message = None

            # Set the exception name to GCalIntegratinatorError
            self.exceptionName = "GCalIntegratinatorError"

        def __str__(self):
            # If a message has been defined
            if self.message is not None:
                # Then put the message in the string representation
                return "{}".format(self.message)

            else:
                # Otherwise return a default string
                return "{} has been raised".format(self.exceptionName)

    class CalendarCreationError(BaseGCalIntegratinatorException):
        """GCalIntegratinator Exception to be raised when an error occurs
        during the creation of the a Google Calendar."""

        def __init__(self, *args):
            # Pass the arguments to the parent class.
            super().__init__(*args)

            # Set the name of the exception
            self.exceptionName = "GoogleCalendarCreationError"

    class InvalidCalendarCredentialsError(BaseGCalIntegratinatorException):
        """GCalIntegratinator Exception to be raised if the provided
        Google Calendar credentials are invalid."""

        def __init__(self, *args):
            # Pass the arguments to the parent class.
            super().__init__(*args)

            # Set the name of the exception
            self.exceptionName = "InvalidCalendarCredentialsError"

    class ExpiredCalendarCredentialsError(BaseGCalIntegratinatorException):
        """GCalIntegratinator Exception to be raised if the provided Google
        Calendar calendar credentials have expired."""

        def __init__(self, *args):
            # Pass the arguments to the parent class.
            super().__init__(*args)

            # Set the name of the exception
            self.exceptionName = "ExpiredCalendarCredentialsError"

    class ScheduleExportError(BaseGCalIntegratinatorException):
        """GCalIntegratinator Exception to be raised if an error is encountered
        when attempting to export a schedule to Google Calendar."""

        def __init__(self, wrappedException, *args):
            # Pass the arguments to the parent class.
            super().__init__(*args)

            # Set the name of the exception
            self.exceptionName = "ScheduleExportError"

            # Set the wrappedException
            self.wrappedException = wrappedException

    class UnexpectedError(BaseGCalIntegratinatorException):
        """GCalIntegratinator Exception to be raised if an unknown
        error occurs within the GCalIntegratintor object"""

        def __init__(self, location, wrappedException, *args):
            # Pass the arguments to the parent class.
            # BUGFIX: this previously called super().__init__(self, args), which
            #  set self.message to the exception instance itself, so any call to
            #  str(err) recursed infinitely through __str__. Forward the extra
            #  positional args just like the sibling exception classes do.
            super().__init__(*args)

            # Set the name of the exception
            self.exceptionName = "GCalIntegratinatorUnknownError"

            # Set the location of where the error occurred.
            self.exceptionLocation = location

            # Set the wrapped exception
            self.wrappedException = wrappedException
class Event:
    """ Object for abstracting the Event schema that is used by the Google Calendar API """

    def __init__(self, summary, description, date):
        """Assemble the request body for a single all-day calendar event.

        Schema per the Google Calendar API events.insert reference
        (https://developers.google.com/calendar/v3/reference/events/insert).

        :param summary: Title of the event.
        :param description: Description of the event. Can contain HTML. Optional.
        :param date: Event date in "yyyy-mm-dd" format; using "date" (rather
            than "dateTime") makes this an all-day event.
        """
        # For an all-day event the inclusive start and exclusive end carry the
        #  same date. Build each as its own dict, as the API expects.
        self.__body = {
            "summary": summary,
            "description": description,
            "start": {
                "date": date
            },
            "end": {
                "date": date
            },
            # "confirmed" is the API's default status; the other possible
            #  values are "tentative" and "cancelled".
            "status": "confirmed",
            # "opaque" (the default) means the event blocks time on the
            #  calendar — equivalent to "Show me as Busy" in the Calendar UI;
            #  "transparent" would leave the time shown as Available.
            "transparency": "opaque",
        }

    def getBody(self):
        """Return the assembled Event body dict."""
        return self.__body
# Manual smoke test: construct the integration object. Requires the Google
#  client configuration environment variables (CLIENT_ID, CLIENT_SECRET, ...)
#  read by _getCredsFromEnv to be set.
if __name__ == "__main__":
    g = gCalIntegratinator()
| 46.711712
| 129
| 0.588091
| 2,142
| 20,740
| 5.625117
| 0.218954
| 0.027886
| 0.011287
| 0.009959
| 0.246079
| 0.217362
| 0.173292
| 0.152461
| 0.13304
| 0.112374
| 0
| 0.002532
| 0.352652
| 20,740
| 443
| 130
| 46.817156
| 0.894905
| 0.45323
| 0
| 0.210843
| 0
| 0
| 0.154283
| 0.017957
| 0
| 0
| 0
| 0
| 0
| 1
| 0.096386
| false
| 0
| 0.036145
| 0.006024
| 0.23494
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34c1c0f2296ec9a8cff26832714ccf9c61244f45
| 961
|
py
|
Python
|
2017/February/2_maxcross/maxcross.py
|
alantao5056/USACO_Silver
|
6998cb916692af58a0b40b1a4aff0708ee1106b8
|
[
"MIT"
] | null | null | null |
2017/February/2_maxcross/maxcross.py
|
alantao5056/USACO_Silver
|
6998cb916692af58a0b40b1a4aff0708ee1106b8
|
[
"MIT"
] | null | null | null |
2017/February/2_maxcross/maxcross.py
|
alantao5056/USACO_Silver
|
6998cb916692af58a0b40b1a4aff0708ee1106b8
|
[
"MIT"
] | null | null | null |
def getMinFix(K, N, signals):
    """Return the minimum number of broken signals (False) in any window of K
    consecutive signals.

    :param K: window length
    :param N: total number of signals (== len(signals))
    :param signals: list of bool, True = working, False = broken
    """
    if K == 1:
        # Only when every signal is broken does a length-1 window need a fix.
        return 1 if set(signals) == {False} else 0

    # Count broken signals in the first window, then slide it one step at a
    #  time: drop the element leaving on the left, add the one entering on
    #  the right.
    broken_in_window = signals[0:K].count(False)
    best = broken_in_window
    for start in range(1, N - K + 1):
        if not signals[start - 1]:
            broken_in_window -= 1
        if not signals[start + K - 1]:
            broken_in_window += 1
        best = min(best, broken_in_window)
    return best
def main(inputFile, outputFile):
    """Solve the USACO 'maxcross' problem for one input file.

    Reads N (signals), K (window size) and B (broken count) from the first
    line of inputFile, then B 1-based broken-signal indices (one per line),
    and writes the minimum number of repairs to outputFile.

    Fix: the file handles are now managed with `with` so they are closed
    even if parsing or solving raises.
    """
    with open(inputFile, 'r') as maxcrossInput, \
            open(outputFile, 'w') as maxcrossOutput:
        N, K, B = (int(v) for v in maxcrossInput.readline().strip().split())

        # All signals start working; mark the B broken ones (input is 1-based).
        signals = [True] * N
        for _ in range(B):
            signals[int(maxcrossInput.readline().strip()) - 1] = False

        maxcrossOutput.write(str(getMinFix(K, N, signals)) + '\n')
# Guard the entry point so importing this module (e.g. for testing) does not
#  immediately try to read 'maxcross.in' from the working directory.
if __name__ == '__main__':
    main('maxcross.in', 'maxcross.out')
| 20.891304
| 62
| 0.612903
| 122
| 961
| 4.811475
| 0.344262
| 0.015332
| 0.037479
| 0.061329
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016438
| 0.240375
| 961
| 46
| 63
| 20.891304
| 0.787671
| 0.014568
| 0
| 0
| 0
| 0
| 0.028541
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0
| 0
| 0.15625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34c2dd9c20a2135a93d6b5c256d90be592b639fa
| 752
|
py
|
Python
|
Python/leetcode2/41. First Missing Positive.py
|
darrencheng0817/AlgorithmLearning
|
aec1ddd0c51b619c1bae1e05f940d9ed587aa82f
|
[
"MIT"
] | 2
|
2015-12-02T06:44:01.000Z
|
2016-05-04T21:40:54.000Z
|
Python/leetcode2/41. First Missing Positive.py
|
darrencheng0817/AlgorithmLearning
|
aec1ddd0c51b619c1bae1e05f940d9ed587aa82f
|
[
"MIT"
] | null | null | null |
Python/leetcode2/41. First Missing Positive.py
|
darrencheng0817/AlgorithmLearning
|
aec1ddd0c51b619c1bae1e05f940d9ed587aa82f
|
[
"MIT"
] | null | null | null |
'''
Given an unsorted integer array, find the smallest missing positive integer.
Example 1:
Input: [1,2,0]
Output: 3
Example 2:
Input: [3,4,-1,1]
Output: 2
Example 3:
Input: [7,8,9,11,12]
Output: 1
Note:
Your algorithm should run in O(n) time and uses constant extra space.
'''
class Solution:
    def firstMissingPositive(self, nums):
        """Return the smallest missing positive integer in O(n) time and O(1)
        extra space (LeetCode 41). Mutates `nums` in place.

        Cyclic sort: every value v with 1 <= v <= len(nums) belongs at index
        v - 1. After placement, the first index whose value does not match
        i + 1 gives the answer.

        Fixes over the previous version:
          - A `while` swap loop (not a single `if`) so chains of displaced
            values are fully resolved; the old single-pass version returned
            3 instead of 5 for [2, 3, 4, 1].
          - Inclusive upper bound (v <= len(nums)); the old `< len(nums)`
            never placed the value equal to the array length.
          - Removed leftover debug print() calls.
        """
        if not nums:
            return 1
        n = len(nums)
        for i in range(n):
            # Keep swapping until slot i holds an out-of-range value or a
            #  value already in its correct position. Each swap puts one
            #  value into its final slot, so the total work stays O(n).
            while 0 < nums[i] <= n and nums[nums[i] - 1] != nums[i]:
                j = nums[i] - 1
                nums[i], nums[j] = nums[j], nums[i]
        for i, num in enumerate(nums):
            if num != i + 1:
                return i + 1
        return n + 1
# Guard the demo run so importing this module does not execute it.
if __name__ == '__main__':
    s = Solution()
    print(s.firstMissingPositive([-1, 4, 2, 1, 9, 10]))
| 21.485714
| 76
| 0.575798
| 118
| 752
| 3.669492
| 0.457627
| 0.04157
| 0.032333
| 0.04157
| 0.110855
| 0.110855
| 0.110855
| 0
| 0
| 0
| 0
| 0.06391
| 0.292553
| 752
| 35
| 77
| 21.485714
| 0.75
| 0.364362
| 0
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0
| 0
| 0.333333
| 0.2
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34c375e2a66eb6bf3befc20ceb9878fbf3112409
| 6,531
|
py
|
Python
|
4/figs/figX9/entropy_comparison_ew_hsm_igm.py
|
t-young31/thesis
|
2dea31ef64f4b7d55b8bdfc2094bab6579a529e0
|
[
"MIT"
] | null | null | null |
4/figs/figX9/entropy_comparison_ew_hsm_igm.py
|
t-young31/thesis
|
2dea31ef64f4b7d55b8bdfc2094bab6579a529e0
|
[
"MIT"
] | null | null | null |
4/figs/figX9/entropy_comparison_ew_hsm_igm.py
|
t-young31/thesis
|
2dea31ef64f4b7d55b8bdfc2094bab6579a529e0
|
[
"MIT"
] | null | null | null |
"""
Calculate the translational entropy with EW, HSM and IGM models
"""
import numpy as np
import matplotlib.pyplot as plt
from scipy import integrate
# Thesis "paper" matplotlib style sheet — assumes it is installed locally
#  (TODO confirm; not shipped with matplotlib).
plt.style.use("paper")

# Hartree -> kJ mol-1 (627.5 kcal mol-1 per hartree * 4.184 kJ per kcal)
ha2kjmol = 627.5 * 4.184
class Constants:
    """Physical constants and unit-conversion factors.

    Naming convention: a `_au` suffix means atomic units, `_SI` means SI
    units; other names carry their unit in a trailing comment.
    """
    # Atomic units
    hbar_au = 1.0
    h_au = hbar_au * 2.0 * np.pi
    kb_au = 3.1668114E-6  # hartrees K-1
    # SI / molar quantities
    h_SI = 6.62607004E-34  # J s
    na = 6.02214086E23  # molecules mol-1
    kb_SI = 1.38064852E-23  # J K-1
    kb_JKmol = kb_SI * na  # J K-1 mol-1
    kb_kcalKmol = kb_SI / (4.184 * 1000)  # kcal K-1 mol-1
    k_b = kb_kcalKmol
    # NOTE(review): `n_a` duplicates `na` above with a slightly different
    #  value (6.022140857E23 vs 6.02214086E23) — confirm which is intended.
    n_a = 6.022140857E23  # molecules mol-1
    # NOTE(review): k_b is in kcal K-1, so r is the gas constant in
    #  kcal K-1 mol-1 (~1.987e-3), not J K-1 mol-1 as originally commented.
    r = k_b * n_a
    h = 6.62607004E-34  # J s
    # Unit-conversion factors
    atm_to_pa = 101325  # Pa
    dm_to_m = 0.1  # m
    amu_to_kg = 1.660539040E-27  # Kg
    c = 299792458  # m s-1
    c_in_cm = c * 100  # cm s-1
    ang_to_m = 1E-10  # m
    ang_to_au = 1.88973  # au Å-1
    m_to_ang = 1E10  # Å
    m_to_bohr = 1.89e+10  # au m-1
    amu_to_au = 1822.888486  # m_e amu-1
    kj_mol_to_au = 0.00038087980  # Ha (kJ mol-1)-1
    kcal_mol_to_au = 0.001593601  # Ha (kcal mol-1)-1
    inverse_ang_inverse_au = 1.0 / 1.88973  # au-1 Å
class Solute:
    # NOTE(review): the methods below read the module-level globals ``temp_K``
    # and (in _q_t_ew) ``beta_au`` that are only assigned in the __main__ block
    # at the bottom of the file; instances are unusable before those exist.

    def _s_t_igm(self, length_au):
        """
        Ideal-gas-model (IGM) translational entropy from a particle-in-a-box
        partition function, returned in the units of Constants.r.

        S = k_B T dln(q_t)/dT + k_B ln(q_t)
          = n k_B ((T / q) (3q / 2T) + ln(q))        dq/dT = 3q / 2T
          = n k_B (3/2 + ln(q))

        :param length_au: effective cubic-box edge length in bohr
        :return: molar translational entropy
        """
        q_t = np.sqrt((2.0 * np.pi * self.mass_au * Constants.kb_au * temp_K) / (
                Constants.h_au ** 2)) ** 3 * length_au ** 3
        return Constants.r * (1.5 + np.log(q_t))

    @property
    def s_t_igm_1atm(self):
        # IGM entropy in the effective box of an ideal gas at 1 atm.
        return self._s_t_igm(length_au=self.l_1atm_au)

    @property
    def s_t_igm_1m(self):
        # IGM entropy in the effective box of a 1 molar solution.
        return self._s_t_igm(length_au=self.l_1molar_au)

    def s_t_hsm(self, omega_au):
        """
        Harmonic-solvation-model (HSM) translational entropy.

        S = k_B T dln(q_t)/dT + k_B ln(q_t)
          = (n k_B T) d ln(q)/dT + n k_B ln(q)
          = n k_B (3 hbar omega beta coth(hbar omega beta / 2) / 2T + ln(q))

        :param omega_au: isotropic harmonic frequency in atomic units
        :raises NotImplementedError: always — the code after the raise is
            unreachable and appears to be kept only for reference.
        """
        raise NotImplementedError
        beta_au = 1.0 / (Constants.kb_au * temp_K)
        q_t = 1.0 / (2.0 * np.sinh(
            Constants.hbar_au * omega_au * beta_au / 2.0)) ** 3
        term1 = 3.0 * (Constants.hbar_au * omega_au * beta_au / 2.0) / np.tanh(
            Constants.hbar_au * omega_au * beta_au / 2.0)
        return Constants.r * (term1 + np.log(q_t))

    def _q_t_ew(self):
        # Exponential-well (EW) translational partition function:
        # q = 4π e^{βa} Λ³ ∫ r² exp(-βa e^{br}) dr.  The integrand decays very
        # rapidly, so the numerical upper limit of 10 bohr is effectively ∞.
        # NOTE(review): uses the module-level global ``beta_au``, not a local.
        def exp_integrand(r, beta, a, b):
            return r ** 2 * np.exp(- beta * a * np.exp(b * r))
        cap_lambda = ((2.0 * self.mass_au * np.pi) / (
                beta_au * Constants.h_au ** 2)) ** 1.5
        integral = integrate.quad(exp_integrand, 0.0, 10.0,
                                  args=(beta_au, self.a_au, self.b_inv_au))[0]
        return 4.0 * np.pi * np.exp(beta_au * self.a_au) * cap_lambda * integral

    @property
    def s_t_ew(self):
        # Analytic EW entropy: S = R(3/2 - aβ + ln q + term_4), where term_4 is
        # the ensemble average of the well contribution, evaluated numerically.
        beta_au = 1.0 / (Constants.kb_au * temp_K)
        q_t = self._q_t_ew()
        def integrand(r, beta, a, b):
            return r ** 2 * np.exp(-beta * a * (np.exp(b * r) - 1.0) + b * r)
        integral = integrate.quad(integrand, 0.0, 10.0, args=(beta_au, self.a_au, self.b_inv_au))[0]
        cap_lambda = ((2.0 * self.mass_au * np.pi) / (beta_au * Constants.h_au ** 2)) ** 1.5
        term_4 = 4.0 * np.pi * (self.a_au * beta_au * cap_lambda / q_t) * integral
        analytic_s = Constants.r * (1.5 - self.a_au * beta_au + np.log(q_t) + term_4)
        # Finite-difference sanity check of the analytic derivative, kept for
        # reference:
        # d_temp = 1E-10
        # dlnq_dtemp = ((np.log(
        #     q_t_ew(1.0 / (Constants.kb_au * (temp_K + d_temp)), self.a_au, self.b_inv_au,
        #            mass_au)) - np.log(q_t))
        #     / d_temp)
        # numerical_s = Constants.r * (temp_K * dlnq_dtemp + np.log(q_t))
        # print('Numerical derivative / analytic derivative = ', numerical_s / analytic_s)
        return analytic_s

    def __init__(self, mass_amu, k_kcal, a_inv_ang):
        """
        :param mass_amu: solute mass in amu
        :param k_kcal: EW well depth in kcal mol-1 (stored as ``a_au``)
        :param a_inv_ang: EW exponent in Å-1 (stored as ``b_inv_au``)
        """
        # NOTE(review): parameter names and attribute names disagree (k_kcal
        # becomes a_au, a_inv_ang becomes b_inv_au) — confusing but consistent
        # with the integrand signatures above.
        self.mass_au = mass_amu * Constants.amu_to_au
        self.a_au = k_kcal * Constants.kcal_mol_to_au
        self.b_inv_au = a_inv_ang * Constants.inverse_ang_inverse_au
        # Harmonic oscillator (unused here):
        # k_au = k_kjmol * Constants.kj_mol_to_au
        # omega_au = np.sqrt(k_au / mass_au)
        # Effective box lengths for the IGM standard states (1 atm and 1 M).
        v_eff_1atm_m3 = Constants.kb_SI * temp_K / Constants.atm_to_pa
        l_1atm = v_eff_1atm_m3 ** (1 / 3) * Constants.m_to_ang
        self.l_1atm_au = l_1atm * Constants.ang_to_au
        v_eff_1molar_m3 = 1.0 / (Constants.n_a * (1.0 / Constants.dm_to_m) ** 3)
        l_1molar = v_eff_1molar_m3 ** (1 / 3) * Constants.m_to_ang
        self.l_1molar_au = l_1molar * Constants.ang_to_au
def ST(S):
    """Return T*S (module-level temp_K), rounded to 3 d.p. for table output."""
    product = S * temp_K
    return np.round(product, decimals=3)
if __name__ == '__main__':
    temp_K = 298.15
    beta_au = 1.0 / (Constants.kb_au * temp_K)

    # EW parameters (well depth / kcal mol-1, exponent / Å-1) for each
    # solute/solvent combination.
    Methane_Water = Solute(mass_amu=16.04, k_kcal=1.048, a_inv_ang=2.918)
    Methane_Acetonitrile = Solute(mass_amu=16.04, k_kcal=0.529, a_inv_ang=2.793)
    Methane_Benzene = Solute(mass_amu=16.04, k_kcal=0.679, a_inv_ang=2.736)
    CO2_Water = Solute(mass_amu=44.01, k_kcal=0.545, a_inv_ang=4.075)
    CO2_Acetonitrile = Solute(mass_amu=44.01, k_kcal=0.446, a_inv_ang=2.93)
    CO2_Benzene = Solute(mass_amu=44.01, k_kcal=0.415, a_inv_ang=3.431)
    Alanine_Water = Solute(mass_amu=89.09, k_kcal=0.53, a_inv_ang=4.083)
    Alanine_Acetonitrile = Solute(mass_amu=89.09, k_kcal=1.005, a_inv_ang=2.127)
    Alanine_Benzene = Solute(mass_amu=89.09, k_kcal=0.368, a_inv_ang=2.878)

    systems = [Methane_Water, Methane_Acetonitrile, Methane_Benzene,
               CO2_Water, CO2_Acetonitrile, CO2_Benzene,
               Alanine_Water, Alanine_Acetonitrile, Alanine_Benzene]

    rels = []
    for solute in systems:
        ts_igm_1atm = ST(solute.s_t_igm_1atm)
        ts_igm_1m = ST(solute.s_t_igm_1m)
        ts_ew = ST(solute.s_t_ew)
        rels.append(ts_ew / ts_igm_1m)
        # One LaTeX table row of T*S values per system.
        print(ts_igm_1atm, ts_igm_1m, ts_ew, 'kcal mol-1', sep=' & ')

    ratios = np.array(rels)
    # Mean EW/IGM(1M) ratio ± standard error of the mean.
    print(np.average(ratios), '±', np.std(ratios) / np.sqrt(len(ratios)))
| 34.739362
| 100
| 0.548767
| 1,087
| 6,531
| 3.00644
| 0.182153
| 0.009792
| 0.023562
| 0.012852
| 0.339963
| 0.27295
| 0.253978
| 0.229192
| 0.179927
| 0.129743
| 0
| 0.095433
| 0.326137
| 6,531
| 187
| 101
| 34.925134
| 0.646898
| 0.160925
| 0
| 0.056075
| 0
| 0
| 0.005466
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.093458
| false
| 0
| 0.028037
| 0.046729
| 0.457944
| 0.018692
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34c9095464074f8f39e3db552d812f1238aad8c5
| 1,305
|
py
|
Python
|
main.py
|
Sirius1942/Agave_ui_tools
|
7789de1d40955046d2e40fbe1c552f4a082c1472
|
[
"MIT"
] | 1
|
2019-04-10T03:17:16.000Z
|
2019-04-10T03:17:16.000Z
|
main.py
|
Sirius1942/Agave_ui_tools
|
7789de1d40955046d2e40fbe1c552f4a082c1472
|
[
"MIT"
] | null | null | null |
main.py
|
Sirius1942/Agave_ui_tools
|
7789de1d40955046d2e40fbe1c552f4a082c1472
|
[
"MIT"
] | null | null | null |
import sys
from PyQt5.QtWidgets import QApplication,QMainWindow,QDialog
from PyQt5 import QtCore, QtGui, QtWidgets
from ui.main_window import Ui_MainWindow
# from login import Ui_dialog
from lib.tcmd import TCmdClass
# from SignalsE import Example
class MyMainWindow(QMainWindow, Ui_MainWindow):
    """Main window built from the generated Ui_MainWindow layout."""

    def __init__(self, parent=None):
        super(MyMainWindow, self).__init__(parent)
        self.setupUi(self)
        self.cmdlists = TCmdClass()

    def addtext(self):
        """Render the accumulated command text and keep the cursor at the end."""
        translate = QtCore.QCoreApplication.translate
        self.textEdit.setHtml(translate("MainWindow", self.getText("abc")))
        # After every content change, move the cursor to the end automatically.
        end_cursor = self.textEdit.textCursor()
        end_cursor.movePosition(QtGui.QTextCursor.End)
        self.textEdit.setTextCursor(end_cursor)

    def getText(self, text):
        """Record *text* in the command list and return the accumulated text."""
        self.cmdlists.addText(text)
        return self.cmdlists.getText()
# class SignalsWindow(QWidget,SignalsExample):
# def __init__(self,parent=None):
# super(SignalsExample,self).__init__(parent)
# self.setupUi(self)
# def keyPressEvent(self, e):
# if e.key() == Qt.Key_Escape:
# self.close()
if __name__ == "__main__":
    # Standard Qt bootstrap: create the application, show the window, and
    # hand control to the event loop until it exits.
    application = QApplication(sys.argv)
    main_window = MyMainWindow()
    main_window.show()
    sys.exit(application.exec_())
| 24.622642
| 75
| 0.683525
| 145
| 1,305
| 5.931034
| 0.455172
| 0.04186
| 0.025581
| 0.039535
| 0.127907
| 0.127907
| 0
| 0
| 0
| 0
| 0
| 0.001932
| 0.206897
| 1,305
| 52
| 76
| 25.096154
| 0.828986
| 0.263602
| 0
| 0
| 0
| 0
| 0.022175
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.208333
| 0
| 0.416667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34ca769ede09a2256c0d08709d7ea01edfa2631c
| 1,675
|
py
|
Python
|
lib/python2.7/site-packages/setools/polcapquery.py
|
TinkerEdgeR-Android/prebuilts_python_linux-x86_2.7.5
|
5bcc5eb23dbb00d5e5dbf75835aa2fb79e8bafa2
|
[
"PSF-2.0"
] | null | null | null |
lib/python2.7/site-packages/setools/polcapquery.py
|
TinkerEdgeR-Android/prebuilts_python_linux-x86_2.7.5
|
5bcc5eb23dbb00d5e5dbf75835aa2fb79e8bafa2
|
[
"PSF-2.0"
] | null | null | null |
lib/python2.7/site-packages/setools/polcapquery.py
|
TinkerEdgeR-Android/prebuilts_python_linux-x86_2.7.5
|
5bcc5eb23dbb00d5e5dbf75835aa2fb79e8bafa2
|
[
"PSF-2.0"
] | 1
|
2020-05-14T05:25:00.000Z
|
2020-05-14T05:25:00.000Z
|
# Copyright 2014-2015, Tresys Technology, LLC
#
# This file is part of SETools.
#
# SETools is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1 of
# the License, or (at your option) any later version.
#
# SETools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SETools. If not, see
# <http://www.gnu.org/licenses/>.
#
import logging
from .mixins import MatchName
from .query import PolicyQuery
class PolCapQuery(MatchName, PolicyQuery):

    """
    Query SELinux policy capabilities.

    Parameter:
    policy          The policy to query.

    Keyword Parameters/Class attributes:
    name            The name of the policy capability to match.
    name_regex      If true, regular expression matching will
                    be used for matching the name.
    """

    def __init__(self, policy, **kwargs):
        super(PolCapQuery, self).__init__(policy, **kwargs)
        self.log = logging.getLogger(__name__)

    def results(self):
        """Generator which yields all matching policy capabilities."""
        self.log.info("Generating policy capability results from {0.policy}".format(self))
        self._match_name_debug(self.log)

        # Yield only the capabilities whose name satisfies the query criteria.
        for cap in self.policy.polcaps():
            if self._match_name(cap):
                yield cap
| 31.603774
| 90
| 0.699104
| 227
| 1,675
| 5.079295
| 0.537445
| 0.017346
| 0.031223
| 0.049436
| 0.08673
| 0.08673
| 0.058977
| 0
| 0
| 0
| 0
| 0.008514
| 0.228657
| 1,675
| 52
| 91
| 32.211538
| 0.883901
| 0.608358
| 0
| 0
| 0
| 0
| 0.087542
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.214286
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34ccc2dc6b0cda2dc80e2e73e7a9e34065db3f8d
| 826
|
py
|
Python
|
casepro/msgs/migrations/0040_outgoing_as_single_pt1.py
|
rapidpro/ureport-partners
|
16e5b95eae36ecbbe8ab2a59f34a2f5fd32ceacd
|
[
"BSD-3-Clause"
] | 21
|
2015-07-21T15:57:49.000Z
|
2021-11-04T18:26:35.000Z
|
casepro/msgs/migrations/0040_outgoing_as_single_pt1.py
|
rapidpro/ureport-partners
|
16e5b95eae36ecbbe8ab2a59f34a2f5fd32ceacd
|
[
"BSD-3-Clause"
] | 357
|
2015-05-22T07:26:45.000Z
|
2022-03-12T01:08:28.000Z
|
casepro/msgs/migrations/0040_outgoing_as_single_pt1.py
|
rapidpro/ureport-partners
|
16e5b95eae36ecbbe8ab2a59f34a2f5fd32ceacd
|
[
"BSD-3-Clause"
] | 24
|
2015-05-28T12:30:25.000Z
|
2021-11-19T01:57:38.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Part 1 ("pt1" in the migration name) of converting Outgoing messages
    # from broadcast-style records to single-contact messages.

    dependencies = [("msgs", "0039_outgoing_text_non_null")]

    operations = [
        # Make backend_id nullable before it is renamed below.
        migrations.AlterField(
            model_name="outgoing",
            name="backend_id",
            field=models.IntegerField(help_text="Broadcast id from the backend", null=True),
        ),
        migrations.RenameField(model_name="outgoing", old_name="backend_id", new_name="backend_broadcast_id"),
        migrations.RemoveField(model_name="outgoing", name="recipient_count"),
        # Each outgoing message now targets a single contact (nullable during
        # the transition; PROTECT prevents deleting a referenced contact).
        migrations.AddField(
            model_name="outgoing",
            name="contact",
            field=models.ForeignKey(to="contacts.Contact", null=True, on_delete=models.PROTECT),
        ),
    ]
| 33.04
| 110
| 0.654964
| 87
| 826
| 5.965517
| 0.54023
| 0.069364
| 0.131021
| 0.121387
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007776
| 0.22155
| 826
| 24
| 111
| 34.416667
| 0.799378
| 0.025424
| 0
| 0.222222
| 0
| 0
| 0.211706
| 0.033624
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.277778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34da6249230478c06324343ddaaf9e58a8828973
| 22,665
|
py
|
Python
|
tests/python3/test_lambda.py
|
pecigonzalo/aws-lambda-ddns-function
|
06e6c06bced80611238734d202deb284a5680813
|
[
"Apache-2.0"
] | 120
|
2018-02-14T21:36:45.000Z
|
2022-03-23T20:52:17.000Z
|
tests/python3/test_lambda.py
|
pecigonzalo/aws-lambda-ddns-function
|
06e6c06bced80611238734d202deb284a5680813
|
[
"Apache-2.0"
] | 17
|
2018-03-29T09:21:23.000Z
|
2021-04-21T21:48:42.000Z
|
tests/python3/test_lambda.py
|
pecigonzalo/aws-lambda-ddns-function
|
06e6c06bced80611238734d202deb284a5680813
|
[
"Apache-2.0"
] | 70
|
2018-02-15T13:03:05.000Z
|
2022-02-24T13:52:43.000Z
|
import os
import sys
import boto3
import boto
import moto
import botocore
import unittest
import logging
import re
import sure
import botocore.session
from datetime import datetime
from moto import mock_sns_deprecated, mock_sqs_deprecated
from botocore.stub import Stubber
from freezegun import freeze_time
from mock import patch
#from moto import mock_dynamodb2, mock_dynamodb2_deprecated
#from moto.dynamodb2 import dynamodb_backend2
from moto import mock_ec2, mock_ec2_deprecated, mock_route53
# Make the directory above tests/ importable so union_python3 can be found.
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0,myPath+'/..')
from union_python3 import publish_to_sns, delete_item_from_dynamodb_table, get_subnet_cidr_block, get_item_from_dynamodb_table, list_hosted_zones, get_hosted_zone_properties, is_dns_support_enabled, is_dns_hostnames_enabled, associate_zone, create_reverse_lookup_zone, get_reversed_domain_prefix, reverse_list, get_dhcp_configurations, create_dynamodb_table, list_tables, put_item_in_dynamodb_table, get_dynamodb_table, create_table, change_resource_recordset, create_resource_record, delete_resource_record, get_zone_id, is_valid_hostname, get_dhcp_option_set_id_for_vpc
# boto (v2) is only needed for the deprecated SNS/SQS moto mocks; tolerate
# its absence rather than failing the whole module import.
try:
    import boto.dynamodb2
except ImportError:
    print("This boto version is not supported")
logging.basicConfig(level=logging.DEBUG)
# Dummy credentials so boto3/moto never touch a real AWS account.
os.environ["AWS_ACCESS_KEY_ID"] = '1111'
os.environ["AWS_SECRET_ACCESS_KEY"] = '2222'
class TestLambda(unittest.TestCase):
    """Unit tests for the DDNS Lambda helpers in union_python3, driven by
    moto service mocks and botocore Stubber stubs."""

    def test_get_subnet_cidr_block(selt):
        # NOTE(review): 'selt' is a typo for 'self' (harmless — unused).
        mock = moto.mock_ec2()
        mock.start()
        client = boto3.client('ec2', region_name='us-east-1')
        vpc = client.create_vpc(CidrBlock="10.0.0.0/16")
        subnet = client.create_subnet(VpcId=vpc['Vpc']['VpcId'], CidrBlock="10.0.0.0/18")
        results = get_subnet_cidr_block(client, subnet['Subnet']['SubnetId'] )
        assert results == '10.0.0.0/18'
        mock.stop()

    def test_listed_hosted_zones(self):
        # list_hosted_zones should surface the zone created via the mock.
        mock = moto.mock_route53()
        mock.start()
        client = boto3.client(
            'route53',
            region_name='us-east-1',
            aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
            aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"],
            aws_session_token='123',
        )
        response = client.create_hosted_zone(
            Name='test4',
            VPC={
                'VPCRegion': 'us-east-1',
                'VPCId': 'vpc-43248d39'
            },
            CallerReference='string',
            HostedZoneConfig={
                'Comment': 'string',
                'PrivateZone': True
            }
        )
        hosted_zone_id = response['HostedZone']['Id']
        response = list_hosted_zones(client)
        assert response['HostedZones'][0]['Name'] == 'test4.'
        mock.stop()

    def test_get_hosted_zone_properties(self):
        # get_hosted_zone_properties should echo back the created zone's id.
        mock = moto.mock_route53()
        mock.start()
        client = boto3.client(
            'route53',
            region_name='us-east-1',
            aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
            aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"],
            aws_session_token='123',
        )
        response = client.create_hosted_zone(
            Name='string',
            VPC={
                'VPCRegion': 'us-east-1',
                'VPCId': 'vpc-43248d39'
            },
            CallerReference='string',
            HostedZoneConfig={
                'Comment': 'string',
                'PrivateZone': True
            }
        )
        hosted_zone_id = response['HostedZone']['Id']
        response = get_hosted_zone_properties(client, hosted_zone_id)
        assert response['HostedZone']['Id'] == hosted_zone_id
        mock.stop()

    def test_is_dns_support_enabled(self):
        # Enabling EnableDnsSupport on the VPC should be reported back as True.
        mock = moto.mock_ec2()
        mock.start()
        client = boto3.client('ec2', region_name='us-east-1')
        dhcp_options = client.create_dhcp_options(
            DhcpConfigurations=[
                {
                    'Key': 'example.com',
                    'Values': [
                        '10.0.0.6',
                        '10.0.0.7'
                    ]
                }
            ]
        )
        print('dhcp options: '+str(dhcp_options))
        vpc1 = client.create_vpc(CidrBlock="10.0.0.0/16")
        print('vpc1: '+str(vpc1))
        response = client.modify_vpc_attribute(
            EnableDnsSupport={
                'Value': True
            },
            VpcId=vpc1['Vpc']['VpcId']
        )
        print('response: '+str(response))
        results = is_dns_support_enabled(client, vpc1['Vpc']['VpcId'])
        print('results: '+str(results))
        assert results == True
        mock.stop()

    def test_is_dns_hostnames_enabled(self):
        # Same as above, but for the EnableDnsHostnames VPC attribute.
        mock = moto.mock_ec2()
        mock.start()
        client = boto3.client('ec2', region_name='us-east-1')
        dhcp_options = client.create_dhcp_options(
            DhcpConfigurations=[
                {
                    'Key': 'example.com',
                    'Values': [
                        '10.0.0.6',
                        '10.0.0.7'
                    ]
                }
            ]
        )
        print('dhcp options: '+str(dhcp_options))
        vpc1 = client.create_vpc(CidrBlock="10.0.0.0/16")
        print('vpc1: '+str(vpc1))
        response = client.modify_vpc_attribute(
            EnableDnsHostnames={
                'Value': True
            },
            VpcId=vpc1['Vpc']['VpcId']
        )
        print('response: '+str(response))
        results = is_dns_hostnames_enabled(client, vpc1['Vpc']['VpcId'])
        print('results: '+str(results))
        assert results == True
        mock.stop()

    @unittest.skip("moto need associate vpc added")
    def test_associate_zone(self):
        # Skipped: moto lacks associate-VPC support; the final assertion
        # (results == 'test') is a placeholder.
        mock = moto.mock_route53()
        mock.start()
        client = boto3.client(
            'route53',
            region_name='us-east-1',
            aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
            aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"],
            aws_session_token='123',
        )
        response = client.create_hosted_zone(
            Name='string',
            VPC={
                'VPCRegion': 'us-east-1',
                'VPCId': 'vpc-43248d39'
            },
            CallerReference='string',
            HostedZoneConfig={
                'Comment': 'string',
                'PrivateZone': True
            }
        )
        hosted_zone_id = response['HostedZone']['Id']
        print('response: '+str(response))
        results = associate_zone(client, hosted_zone_id, 'us-east-1', 'vpc-43248d39')
        assert results == 'test'
        mock.stop()

    def test_create_reverse_lookup_zone(self):
        # The reverse zone name should be <prefix>.in-addr.arpa.
        instance = {
            'Reservations' :[
                {
                    'Instances': [
                        {
                            'VpcId': '123'
                        }
                    ]
                }
            ]
        }
        mock = moto.mock_route53()
        mock.start()
        client = boto3.client('route53', region_name='us-east-1')
        response = create_reverse_lookup_zone(client, instance, 'abc.', 'us-east-1')
        assert response['HostedZone']['Name'] == 'abc.in-addr.arpa.'
        mock.stop()

    def test_get_reversed_domain_prefix_16(self):
        # NOTE(review): /16 and /24 expect the same prefix here — verify
        # get_reversed_domain_prefix actually distinguishes the mask length.
        results = get_reversed_domain_prefix(16, '10.0.0.1')
        assert results == '10.0.0.'

    def test_get_reversed_domain_prefix_24(self):
        results = get_reversed_domain_prefix(24, '10.0.0.1')
        assert results == '10.0.0.'

    @patch('union_python3.publish_to_sns')
    def test_reverse_list_with_invalid_ip(
            self,
            sns
    ):
        # NOTE(review): 'sns.return_value == None' is a comparison, not an
        # assignment — it is a no-op; '=' was almost certainly intended.
        sns.return_value == None
        response = reverse_list('test')
        assert response == None

    def test_reverse_list(self):
        results = reverse_list('172.168.3.7')
        assert results == '7.3.168.172.'

    def test_get_dhcp_configurations(self):
        mock = moto.mock_ec2()
        mock.start()
        client = boto3.client('ec2', region_name='us-east-1')
        dhcp_options = client.create_dhcp_options(
            DhcpConfigurations=[
                {
                    'Key': 'example.com',
                    'Values': [
                        '10.0.0.6',
                        '10.0.0.7'
                    ]
                }
            ]
        )
        print('dhcp options: '+str(dhcp_options))
        vpc1 = client.create_vpc(CidrBlock="10.0.0.0/16")
        print('vpc1: '+str(vpc1))
        vpc2 = client.create_vpc(CidrBlock="10.0.0.0/16")
        print('vpc2: '+str(vpc2))
        vpc3 = client.create_vpc(CidrBlock="10.0.0.0/24")
        print('vpc3: '+str(vpc3))
        client.associate_dhcp_options(DhcpOptionsId=dhcp_options['DhcpOptions']['DhcpOptionsId'], VpcId=vpc1['Vpc']['VpcId'])
        client.associate_dhcp_options(DhcpOptionsId=dhcp_options['DhcpOptions']['DhcpOptionsId'], VpcId=vpc2['Vpc']['VpcId'])
        results = get_dhcp_configurations(client, dhcp_options['DhcpOptions']['DhcpOptionsId'] )
        # Returning nothing now because moto needs fixed
        assert results == []
        mock.stop()

    def test_create_dynamodb_table(self):
        mock = moto.mock_dynamodb2()
        mock.start()
        client = boto3.client('dynamodb', region_name='us-east-1')
        results = create_dynamodb_table(client, 'DDNS')
        assert results['TableDescription']['TableName'] == 'DDNS'
        mock.stop()

    def test_get_dhcp_option_set_id_for_vpc(self):
        # NOTE(review): SAMPLE_DOMAIN_NAME / SAMPLE_NAME_SERVERS are unused.
        SAMPLE_DOMAIN_NAME = u'example.com'
        SAMPLE_NAME_SERVERS = [u'10.0.0.6', u'10.0.0.7']
        mock = moto.mock_ec2()
        mock.start()
        client = boto3.client('ec2', region_name='us-east-1')
        dhcp_options = client.create_dhcp_options(
            DhcpConfigurations=[
                {
                    'Key': 'example.com',
                    'Values': [
                        '10.0.0.6',
                        '10.0.0.7'
                    ]
                }
            ]
        )
        print('dhcp options: '+str(dhcp_options))
        vpc1 = client.create_vpc(CidrBlock="10.0.0.0/16")
        print('vpc1: '+str(vpc1))
        vpc2 = client.create_vpc(CidrBlock="10.0.0.0/16")
        print('vpc2: '+str(vpc2))
        vpc3 = client.create_vpc(CidrBlock="10.0.0.0/24")
        print('vpc3: '+str(vpc3))
        client.associate_dhcp_options(DhcpOptionsId=dhcp_options['DhcpOptions']['DhcpOptionsId'], VpcId=vpc1['Vpc']['VpcId'])
        client.associate_dhcp_options(DhcpOptionsId=dhcp_options['DhcpOptions']['DhcpOptionsId'], VpcId=vpc2['Vpc']['VpcId'])
        #vpcs = client.describe_vpcs(Filters=[{'Name': 'dhcp-options-id', 'Values': [dhcp_options['DhcpOptions']['DhcpOptionsId']]}])
        results = get_dhcp_option_set_id_for_vpc(client, vpc1['Vpc']['VpcId'])
        assert results == dhcp_options['DhcpOptions']['DhcpOptionsId']
        mock.stop()

    def test_is_invalid_hostname(self):
        results = is_valid_hostname( None)
        assert results == False

    def test_is_valid_hostname(self):
        results = is_valid_hostname( 'test')
        assert results == True

    def test_get_zone_id(self):
        # get_zone_id should return the bare zone id (15 chars in moto).
        mock = moto.mock_route53()
        mock.start()
        client = boto3.client('route53', region_name='us-east-1')
        client.create_hosted_zone(
            Name="db.",
            CallerReference=str(hash('foo')),
            HostedZoneConfig=dict(
                PrivateZone=True,
                Comment="db",
            )
        )
        zones = client.list_hosted_zones_by_name(DNSName="db.")
        hosted_zone_id = zones["HostedZones"][0]["Id"]
        # Create A Record.
        a_record_endpoint_payload = {
            'Comment': 'Create A record prod.redis.db',
            'Changes': [
                {
                    'Action': 'CREATE',
                    'ResourceRecordSet': {
                        'Name': 'prod.redis.db.',
                        'Type': 'A',
                        'TTL': 10,
                        'ResourceRecords': [{
                            'Value': '127.0.0.1'
                        }]
                    }
                }
            ]
        }
        client.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=a_record_endpoint_payload)
        results = get_zone_id( client, 'db.')
        assert len(results) == 15
        mock.stop()

    def test_delete_resource_record(self):
        mock = moto.mock_route53()
        mock.start()
        client = boto3.client('route53', region_name='us-east-1')
        client.create_hosted_zone(
            Name="db.",
            CallerReference=str(hash('foo')),
            HostedZoneConfig=dict(
                PrivateZone=True,
                Comment="db",
            )
        )
        zones = client.list_hosted_zones_by_name(DNSName="db.")
        hosted_zone_id = zones["HostedZones"][0]["Id"]
        # Create A Record.
        a_record_endpoint_payload = {
            'Comment': 'Create A record prod.redis.db',
            'Changes': [
                {
                    'Action': 'CREATE',
                    'ResourceRecordSet': {
                        'Name': 'prod.redis.db.',
                        'Type': 'A',
                        'TTL': 10,
                        'ResourceRecords': [{
                            'Value': '127.0.0.1'
                        }]
                    }
                }
            ]
        }
        client.change_resource_record_sets(HostedZoneId=hosted_zone_id, ChangeBatch=a_record_endpoint_payload)
        results = delete_resource_record( client, hosted_zone_id,'prod','redis.db.','A','127.0.0.1')
        assert results['ChangeInfo']['Status'] == 'INSYNC'
        mock.stop()

    def test_create_resource_record(self):
        mock = moto.mock_route53()
        mock.start()
        client = boto3.client('route53', region_name='us-east-1')
        client.create_hosted_zone(
            Name="db.",
            CallerReference=str(hash('foo')),
            HostedZoneConfig=dict(
                PrivateZone=True,
                Comment="db",
            )
        )
        zones = client.list_hosted_zones_by_name(DNSName="db.")
        hosted_zone_id = zones["HostedZones"][0]["Id"]
        results = create_resource_record( client, hosted_zone_id,'prod','redis.db.','A','127.0.0.1')
        assert results['ChangeInfo']['Status'] == 'INSYNC'
        mock.stop()

    def test_change_resource_recordset(self):
        mock = moto.mock_route53()
        mock.start()
        client = boto3.client('route53', region_name='us-east-1')
        client.create_hosted_zone(
            Name="db.",
            CallerReference=str(hash('foo')),
            HostedZoneConfig=dict(
                PrivateZone=True,
                Comment="db",
            )
        )
        zones = client.list_hosted_zones_by_name(DNSName="db.")
        hosted_zone_id = zones["HostedZones"][0]["Id"]
        results = change_resource_recordset( client, hosted_zone_id,'prod','redis.db.','A','127.0.0.1')
        assert results['ChangeInfo']['Status'] == 'INSYNC'
        mock.stop()

    def test_create_table(self):
        mock = moto.mock_dynamodb2()
        mock.start()
        client = boto3.client("dynamodb")
        results = create_table(client, 'DDNS')
        assert results == True
        mock.stop()

    def test_get_dynamodb_table(self):
        mock = moto.mock_dynamodb2()
        mock.start()
        client = boto3.client("dynamodb")
        client.create_table(TableName="DDNS"
            , KeySchema=[{"AttributeName": "id", "KeyType": "HASH"}]
            , AttributeDefinitions=[{"AttributeName": "id", "AttributeType": "S"}]
            , ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1})
        results = get_dynamodb_table(client, 'DDNS')
        assert results['Table']['TableName'] == 'DDNS'
        mock.stop()

    def test_list_tables(self):
        # Stubber-based test: no moto, responses are scripted directly.
        dynamodb_client = botocore.session.get_session().create_client('dynamodb','us-east-1')
        dynamodb_client_stubber = Stubber(dynamodb_client)
        response = {
            'TableNames': [
                'DDNS',
            ],
            'LastEvaluatedTableName': 'DDNS'
        }
        expected_params = {}
        dynamodb_client_stubber.add_response('list_tables', response, expected_params)
        with dynamodb_client_stubber:
            results = list_tables(dynamodb_client)
        assert results['TableNames'][0]== 'DDNS'

    def test_put_item_in_dynamodb_table(self):
        # Stubber-based test with a full boilerplate PutItem response shape.
        dynamodb_client = botocore.session.get_session().create_client('dynamodb','us-east-1')
        dynamodb_client_stubber = Stubber(dynamodb_client)
        response = {
            'Attributes': {
                'InstanceId': {
                    'S': '123',
                    'NULL': True,
                    'BOOL': True
                },
                'InstanceAttributes': {
                    'S': '123',
                    'NULL': True,
                    'BOOL': True
                }
            },
            'ConsumedCapacity': {
                'TableName': 'string',
                'CapacityUnits': 123.0,
                'ReadCapacityUnits': 123.0,
                'WriteCapacityUnits': 123.0,
                'Table': {
                    'ReadCapacityUnits': 123.0,
                    'WriteCapacityUnits': 123.0,
                    'CapacityUnits': 123.0
                },
                'LocalSecondaryIndexes': {
                    'string': {
                        'ReadCapacityUnits': 123.0,
                        'WriteCapacityUnits': 123.0,
                        'CapacityUnits': 123.0
                    }
                },
                'GlobalSecondaryIndexes': {
                    'string': {
                        'ReadCapacityUnits': 123.0,
                        'WriteCapacityUnits': 123.0,
                        'CapacityUnits': 123.0
                    }
                }
            },
            'ItemCollectionMetrics': {
                'ItemCollectionKey': {
                    'string': {
                        'S': 'string',
                        'N': 'string',
                        'B': b'bytes',
                        'SS': [
                            'string',
                        ],
                        'NS': [
                            'string',
                        ],
                        'BS': [
                            b'bytes',
                        ],
                        'M': {
                            'string': {}
                        },
                        'L': [
                            {},
                        ],
                        'NULL': True,
                        'BOOL': True
                    }
                },
                'SizeEstimateRangeGB': [
                    123.0,
                ]
            }
        }
        expected_params = {
            'TableName': 'DDNS',
            'Item': {
                'InstanceId': {'S':'123'},
                'InstanceAttributes': {'S':'123'}
            }
        }
        dynamodb_client_stubber.add_response('put_item', response, expected_params)
        with dynamodb_client_stubber:
            results = put_item_in_dynamodb_table(dynamodb_client, 'DDNS', '123','123')
        assert results == response

    def test_get_item_from_dynamodb_table(self):
        # Round-trip: create table, put an item, read it back.
        mock = moto.mock_dynamodb2()
        mock.start()
        client = boto3.client('dynamodb',
                              region_name='us-west-2',
                              aws_access_key_id="ak",
                              aws_secret_access_key="sk")
        results = create_table(client, 'DDNS')
        print('results: '+str(results))
        results = put_item_in_dynamodb_table(client, 'DDNS', '123', '123')
        print('results: '+str(results))
        results = get_item_from_dynamodb_table(client, 'DDNS', '123')
        print('results: '+str(results))
        assert results == 123
        mock.stop()

    def test_delete_item_from_dynamodb_table(self):
        # Round-trip plus delete: the second read must come back empty.
        mock = moto.mock_dynamodb2()
        mock.start()
        client = boto3.client('dynamodb',
                              region_name='us-east-1',
                              aws_access_key_id="ak",
                              aws_secret_access_key="sk")
        results = create_table(client, 'DDNS')
        print('results: '+str(results))
        results = put_item_in_dynamodb_table(client, 'DDNS', '123', '123')
        print('results: '+str(results))
        results = get_item_from_dynamodb_table(client, 'DDNS', '123')
        print('results: '+str(results))
        assert results == 123
        results = delete_item_from_dynamodb_table(client, 'DDNS', '123')
        print('results: '+str(results))
        results = get_item_from_dynamodb_table(client, 'DDNS', '123')
        print('results: ' + str(results))
        assert results == None
        mock.stop()

    @unittest.skip("moto needs TopicArn added to publish")
    @mock_sqs_deprecated
    @mock_sns_deprecated
    def test_publish_to_sns(self):
        # Skipped pending moto support.
        # NOTE(review): MESSAGE_FROM_SQS_TEMPLATE has two %s placeholders but
        # is later formatted with four arguments — this would raise TypeError
        # if the skip were removed.  The re.sub pattern should also be a raw
        # string.
        MESSAGE_FROM_SQS_TEMPLATE = '{\n  "Message": "%s",\n  "MessageId": "%s"\n}'
        conn = boto.connect_sns()
        conn.create_topic("some-topic")
        topics_json = conn.get_all_topics()
        topic_arn = topics_json["ListTopicsResponse"][
            "ListTopicsResult"]["Topics"][0]['TopicArn']
        sqs_conn = boto.connect_sqs()
        sqs_conn.create_queue("test-queue")
        conn.subscribe(topic_arn, "sqs",
                       "arn:aws:sqs:us-east-1:123456789012:test-queue")
        message_to_publish = 'my message'
        subject_to_publish = "test subject"
        with freeze_time("2015-01-01 12:00:00"):
            published_message = publish_to_sns(conn, '123456789012', 'us-east-1', message_to_publish)
        published_message_id = published_message['MessageId']
        queue = sqs_conn.get_queue("test-queue")
        message = queue.read(1)
        expected = MESSAGE_FROM_SQS_TEMPLATE % (
            message_to_publish, published_message_id, subject_to_publish, 'us-east-1')
        acquired_message = re.sub("\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z", '2015-01-01T12:00:00.000Z',
                                  message.get_body())
        acquired_message.should.equal(expected)
| 30.382038
| 571
| 0.528039
| 2,270
| 22,665
| 5.02511
| 0.126872
| 0.007189
| 0.015341
| 0.03156
| 0.662751
| 0.623477
| 0.573332
| 0.563601
| 0.554133
| 0.542299
| 0
| 0.038103
| 0.346923
| 22,665
| 745
| 572
| 30.422819
| 0.732536
| 0.013545
| 0
| 0.53777
| 0
| 0.001799
| 0.152924
| 0.01387
| 0
| 0
| 0
| 0
| 0.048561
| 1
| 0.048561
| false
| 0
| 0.035971
| 0
| 0.086331
| 0.046763
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34db9103dcbc551abbdebff8ae585f4f1742d35b
| 1,929
|
py
|
Python
|
web/transiq/api/decorators.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | null | null | null |
web/transiq/api/decorators.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | 14
|
2020-06-05T23:06:45.000Z
|
2022-03-12T00:00:18.000Z
|
web/transiq/api/decorators.py
|
manibhushan05/transiq
|
763fafb271ce07d13ac8ce575f2fee653cf39343
|
[
"Apache-2.0"
] | null | null | null |
import functools
import json

from api.helper import json_405_response, json_error_response
def no_test(func):
    """
    Mark a view as exempt from testing; use wisely.

    The wrapper forwards all arguments unchanged and carries a
    ``do_not_test`` attribute for test tooling to inspect.
    """
    @functools.wraps(func)
    def inner(request, *args, **kwargs):
        return func(request, *args, **kwargs)

    # functools.wraps copies __name__/__module__/__doc__ and *updates*
    # __dict__ rather than aliasing it.  The previous hand-rolled version
    # rebound inner.__dict__ to func.__dict__, so setting do_not_test below
    # leaked the attribute onto the wrapped function as well.
    inner.do_not_test = True
    return inner
def api_get(func):
    """
    Restrict a view to GET requests.

    Non-GET requests get a 405 JSON response; GET requests proceed with an
    empty ``request.data`` (GET carries no body payload).
    """
    @functools.wraps(func)
    def inner(request, *args, **kwargs):
        if request.method != "GET":
            return json_405_response()
        request.data = {}
        return func(request, *args, **kwargs)

    # wraps() replaces the manual __name__/__module__/__doc__/__dict__ copy,
    # avoiding the __dict__-aliasing bug of the original implementation.
    return inner
def api_post(func):
    """
    Restrict a view to POST requests and parse the JSON body.

    Non-POST requests get a 405 JSON response.  An empty or malformed body
    deliberately degrades to ``request.data = {}`` rather than erroring.
    """
    @functools.wraps(func)
    def inner(request, *args, **kwargs):
        if request.method != "POST":
            return json_405_response()
        try:
            request.data = {} if not request.body else json.loads(request.body.decode('utf-8'))
        except ValueError:
            # Malformed JSON is treated as "no payload", matching the
            # best-effort contract of the original implementation.
            request.data = {}
        return func(request, *args, **kwargs)

    # wraps() replaces the manual dunder copy (and its __dict__ aliasing).
    return inner
def authenticated_user(func):
    """
    Require an active, authenticated user on the request.

    Returns a 401 JSON error when the user is missing, not authenticated,
    or inactive; otherwise the wrapped view runs normally.
    """
    @functools.wraps(func)
    def inner(request, *args, **kwargs):
        if not request.user:
            return json_error_response('no user present', 401)
        if not request.user.is_authenticated:
            return json_error_response('user is not authenticated', 401)
        if not request.user.is_active:
            return json_error_response('user authenticated but inactive', 401)
        return func(request, *args, **kwargs)

    # wraps() replaces the manual dunder copy (and its __dict__ aliasing).
    return inner
| 30.619048
| 95
| 0.653188
| 234
| 1,929
| 4.74359
| 0.226496
| 0.079279
| 0.122523
| 0.068468
| 0.620721
| 0.54955
| 0.511712
| 0.483784
| 0.483784
| 0.404505
| 0
| 0.013058
| 0.245723
| 1,929
| 62
| 96
| 31.112903
| 0.749828
| 0.026957
| 0
| 0.64
| 0
| 0
| 0.0446
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16
| false
| 0
| 0.04
| 0.02
| 0.46
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34e14659ac3348a14f3cb971dd1656c1b96e47ab
| 4,917
|
py
|
Python
|
MIDI Remote Scripts/pushbase/step_duplicator.py
|
aarkwright/ableton_devices
|
fe5df3bbd64ccbc136bba722ba1e131a02969798
|
[
"MIT"
] | null | null | null |
MIDI Remote Scripts/pushbase/step_duplicator.py
|
aarkwright/ableton_devices
|
fe5df3bbd64ccbc136bba722ba1e131a02969798
|
[
"MIT"
] | null | null | null |
MIDI Remote Scripts/pushbase/step_duplicator.py
|
aarkwright/ableton_devices
|
fe5df3bbd64ccbc136bba722ba1e131a02969798
|
[
"MIT"
] | null | null | null |
# uncompyle6 version 3.3.5
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.7.3 (default, Apr 24 2019, 15:29:51) [MSC v.1915 64 bit (AMD64)]
# Embedded file name: c:\Jenkins\live\output\win_64_static\Release\python-bundle\MIDI Remote Scripts\pushbase\step_duplicator.py
# Compiled at: 2018-11-30 15:48:12
from __future__ import absolute_import, print_function, unicode_literals
from functools import partial
from ableton.v2.base import liveobj_valid, nop
from ableton.v2.control_surface import Component
from ableton.v2.control_surface.control import ButtonControl
from .consts import MessageBoxText
from .message_box_component import Messenger
ALL_NOTES = -1

def get_transposition_amount(source_step, destination_step):
    """
    Return the pitch offset from source_step to destination_step.

    Steps are tuples whose first element is a pitch, with ALL_NOTES acting
    as a sentinel: a source of ALL_NOTES transposes by nothing, and a
    destination of ALL_NOTES keeps the source pitch as the offset.
    """
    src_pitch = source_step[0]
    dst_pitch = destination_step[0]
    if src_pitch == ALL_NOTES:
        return 0
    if dst_pitch == ALL_NOTES:
        return src_pitch
    return dst_pitch - src_pitch
class NullStepDuplicator(object):
    """Inert stand-in used where no real step duplicator is wired up."""

    @property
    def is_duplicating(self):
        """The null duplicator is never active."""
        return False

    def set_clip(self, _):
        """Accept and ignore any clip; no state is held."""
        return None
def set_loop(clip, loop_start, loop_end):
    """Move the clip loop (and start/end markers) to [loop_start, loop_end].

    The two loop endpoints are written in an order that keeps the
    intermediate state valid (presumably the clip rejects a loop whose
    start lies past its end), and the update is only completed if the
    first write actually stuck.
    """
    if loop_start >= clip.loop_end:
        # New start is beyond the current end: grow the end first.
        clip.loop_end = loop_end
        first_write_ok = clip.loop_end == loop_end
        if first_write_ok:
            clip.loop_start = loop_start
            clip.end_marker = loop_end
            clip.start_marker = loop_start
    else:
        # Safe to move the start first.
        clip.loop_start = loop_start
        first_write_ok = clip.loop_start == loop_start
        if first_write_ok:
            clip.loop_end = loop_end
            clip.end_marker = loop_end
            clip.start_marker = loop_start
class StepDuplicatorComponent(Component, Messenger):
    """Copy/paste of sequencer steps while the duplicate button is held.

    The first step touched while the button is down becomes the copy
    source; each subsequent touch pastes onto that step.  A "step" is
    modelled as the tuple (pitch, start, length, nudge_offset, is_page),
    where pitch may be ALL_NOTES (-1) to cover the full pitch range.
    """
    button = ButtonControl()

    def __init__(self, *a, **k):
        super(StepDuplicatorComponent, self).__init__(*a, **k)
        self._clip = None  # clip currently being edited
        self._source_step = None  # step tuple captured by the first touch
        # Callable yielding the live notification (or None); initialised
        # to a no-op so _cancel_duplicate can always call it safely.
        self._notification_reference = partial(nop, None)
        return

    @property
    def is_duplicating(self):
        # Active only while the button is held AND a valid clip is set.
        return self.button.is_pressed and liveobj_valid(self._clip)

    def set_clip(self, clip):
        # Switching clips aborts any copy in progress.
        self._cancel_duplicate()
        self._clip = clip

    def add_step_with_pitch(self, note, step_start, step_end, nudge_offset=0, is_page=False):
        """Register a touched step: first touch copies, later touches paste."""
        if self.is_enabled() and self.is_duplicating:
            current_step = (note,
             step_start,
             step_end - step_start,
             nudge_offset,
             is_page)
            if self._source_step is not None:
                self._duplicate_to(current_step)
            else:
                self._duplicate_from(current_step)
        return

    def add_step(self, step_start, step_end, nudge_offset=0, is_page=False):
        # Pitch-agnostic variant: operate on every note in the step.
        self.add_step_with_pitch(ALL_NOTES, step_start, step_end, nudge_offset, is_page)

    def _duplicate_from(self, source_step):
        """Capture *source_step* as the copy source if it contains notes."""
        message = MessageBoxText.CANNOT_COPY_EMPTY_PAGE if source_step[4] else MessageBoxText.CANNOT_COPY_EMPTY_STEP
        from_pitch = source_step[0]
        pitch_span = 1
        if from_pitch == ALL_NOTES:
            # Scan the full MIDI pitch range instead of a single pitch.
            from_pitch = 0
            pitch_span = 127
        notes = self._clip.get_notes(source_step[1], from_pitch, source_step[2], pitch_span)
        if len(notes) > 0:
            message = MessageBoxText.COPIED_PAGE if source_step[4] else MessageBoxText.COPIED_STEP
            self._source_step = source_step
        # A notification is shown whether or not the copy succeeded.
        self._notification_reference = self.show_notification(message)

    def _duplicate_to(self, destination_step):
        """Paste the captured source step onto *destination_step*."""
        if self._source_step[4] == destination_step[4]:
            # Same kind on both sides (step->step or page->page).
            message = MessageBoxText.CANNOT_PASTE_TO_SOURCE_PAGE if destination_step[4] else MessageBoxText.CANNOT_PASTE_TO_SOURCE_STEP
            if destination_step != self._source_step:
                message = MessageBoxText.PASTED_PAGE if destination_step[4] else MessageBoxText.PASTED_STEP
                self._clip.duplicate_region(self._source_step[1], self._source_step[2], destination_step[1] + self._source_step[3], self._source_step[0], get_transposition_amount(self._source_step, destination_step))
        else:
            # Mixed kinds (step vs page) cannot be pasted onto each other.
            message = MessageBoxText.CANNOT_PASTE_FROM_STEP_TO_PAGE if destination_step[4] else MessageBoxText.CANNOT_PASTE_FROM_PAGE_TO_STEP
        loop_start = destination_step[1]
        loop_end = loop_start + self._source_step[2]
        # When pasting a page outside the current loop, move the loop to it.
        if destination_step[4] and not (loop_start >= self._clip.loop_start and loop_end <= self._clip.loop_end):
            set_loop(self._clip, loop_start, loop_end)
        self._notification_reference = self.show_notification(message)
        self._source_step = None  # one paste per copied source
        return

    def _cancel_duplicate(self):
        """Drop any copy in progress and hide its notification."""
        self._source_step = None
        if self._notification_reference() is not None:
            self._notification_reference().hide()
        return

    @button.released
    def button(self, _):
        # Releasing the duplicate button aborts an unfinished copy.
        self._cancel_duplicate()

    def update(self):
        super(StepDuplicatorComponent, self).update()
        self._cancel_duplicate()
| 39.653226
| 216
| 0.690462
| 642
| 4,917
| 4.934579
| 0.219626
| 0.078914
| 0.061869
| 0.026831
| 0.3125
| 0.207386
| 0.152146
| 0.084596
| 0.084596
| 0.052399
| 0
| 0.022667
| 0.228391
| 4,917
| 124
| 217
| 39.653226
| 0.812335
| 0.061623
| 0
| 0.27
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.14
| false
| 0.01
| 0.07
| 0.02
| 0.31
| 0.01
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34e4d3ae291ecf089e466ddec64c7d9c23c88213
| 1,540
|
py
|
Python
|
Python/Examples/Macros/MoveAxis.py
|
halmusaibeli/RoboDK-API
|
e017aa26715bc8d0fcbbc05e57acc32f2d2d6174
|
[
"MIT"
] | null | null | null |
Python/Examples/Macros/MoveAxis.py
|
halmusaibeli/RoboDK-API
|
e017aa26715bc8d0fcbbc05e57acc32f2d2d6174
|
[
"MIT"
] | null | null | null |
Python/Examples/Macros/MoveAxis.py
|
halmusaibeli/RoboDK-API
|
e017aa26715bc8d0fcbbc05e57acc32f2d2d6174
|
[
"MIT"
] | null | null | null |
# This macro allows changing the position of an external axis by hand or within a program as a function call.
# Example of a function call (units are in mm or deg):
# MoveAxis(0)
# MoveAxis(100)
# https://robodk.com/doc/en/RoboDK-API.html
import sys  # allows getting the passed argument parameters

from robodk.robodialogs import *

# Enter the name of the axis (leave empty to select the first mechanism/robot available)
MECHANISM_NAME = ''

# Enter the default value:
DEFAULT_VALUE = 0

# Set to blocking to make the program wait until the axis has stopped moving
BLOCKING = True

# --------------- PROGRAM START -------------------------
VALUE = DEFAULT_VALUE
if len(sys.argv) < 2:
    # Prompt the user to enter a new value if the macro is just double clicked
    # (fixed user-facing message: "macro be called" -> "macro can be called")
    print('This macro can be called as MoveAxis(value)')
    print('Number of arguments: ' + str(len(sys.argv)))
    #raise Exception('Invalid parameters provided: ' + str(sys.argv))
    entry = mbox('Move one axis. Enter the new value in mm or deg\n\nNote: this can be called as a program.\nExample: MoveAxis(VALUE)', entry=str(DEFAULT_VALUE))
    if not entry:
        #raise Exception('Operation cancelled by user')
        quit()
    VALUE = float(entry)
else:
    # Take the argument as the new joint value
    VALUE = float(sys.argv[1])

# Use the RoboDK API:
from robodk.robolink import *  # API to communicate with RoboDK

RDK = Robolink()

# Get the robot item:
axis = RDK.Item(MECHANISM_NAME, ITEM_TYPE_ROBOT)

# Move the robot/mechanism
axis.MoveJ([VALUE], BLOCKING)
| 32.765957
| 161
| 0.698701
| 234
| 1,540
| 4.568376
| 0.478632
| 0.044902
| 0.024322
| 0.016838
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005613
| 0.19026
| 1,540
| 46
| 162
| 33.478261
| 0.851644
| 0.542857
| 0
| 0
| 0
| 0.052632
| 0.255474
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.157895
| 0
| 0.157895
| 0.105263
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34e5b5fd754168ef6338e900c37a7a2ea6696ba4
| 3,126
|
py
|
Python
|
pgcsv/db.py
|
pudo/pgcsv
|
9a6ae352da2ae3de5953b8b1f4c48dfcab403a3e
|
[
"MIT"
] | 66
|
2017-02-05T19:36:03.000Z
|
2022-01-25T21:41:18.000Z
|
pgcsv/db.py
|
pudo/pgcsv
|
9a6ae352da2ae3de5953b8b1f4c48dfcab403a3e
|
[
"MIT"
] | 4
|
2020-05-19T20:26:13.000Z
|
2021-06-25T15:27:47.000Z
|
pgcsv/db.py
|
pudo/pgcsv
|
9a6ae352da2ae3de5953b8b1f4c48dfcab403a3e
|
[
"MIT"
] | 6
|
2017-12-02T15:37:38.000Z
|
2021-07-21T15:19:02.000Z
|
from psycopg2 import connect
from psycopg2.sql import SQL, Identifier, Literal, Composed
from collections import OrderedDict
from itertools import count
from pgcsv.util import normalize_column
class Database(object):
    """PostgreSQL loader: mirror CSV headers as TEXT columns, bulk-load via COPY."""

    def __init__(self, uri, table, headers):
        # uri: libpq connection string; table: destination table name
        # (normalized before use); headers: raw CSV header names.
        self.conn = connect(uri)
        self.table = normalize_column(table)
        self._raw_headers = headers

    @property
    def headers(self):
        """OrderedDict of normalized, de-duplicated column name -> original label.

        Computed lazily on first access and cached on the instance.
        """
        if not hasattr(self, '_headers'):
            self._headers = OrderedDict()
            for name in self._raw_headers:
                normalized = normalize_column(name)
                if normalized is None or not len(normalized):
                    # Fall back to a generic name for unusable headers.
                    normalized = 'column'
                column = normalized
                # Suffix _2, _3, ... until the name is unique.
                for i in count(2):
                    if column not in self._headers:
                        break
                    column = '%s_%s' % (normalized, i)
                self._headers[column] = name
        return self._headers

    def drop(self):
        """Drop the destination table if it exists, then commit."""
        with self.conn.cursor() as cursor:
            stmt = SQL('DROP TABLE IF EXISTS {};')
            stmt = stmt.format(Identifier(self.table))
            # print stmt.as_string(cursor)
            cursor.execute(stmt)
        self.conn.commit()

    def sync(self):
        """Ensure the table exists and has a TEXT column for every header.

        The original header label is preserved as a column COMMENT on any
        newly added column.
        """
        with self.conn.cursor() as cursor:
            stmt = SQL("CREATE TABLE IF NOT EXISTS {} ();")
            stmt = stmt.format(Identifier(self.table))
            # print stmt.as_string(cursor)
            cursor.execute(stmt)
            stmt = SQL("SELECT column_name FROM "
                       "information_schema.columns "
                       "WHERE table_name = %s;")  # noqa
            cursor.execute(stmt, (self.table,))
            columns = [c[0] for c in cursor.fetchall()]
            # columns = [c.decode(self.conn.encoding) for c in columns]
            for column, label in self.headers.items():
                if column not in columns:
                    stmt = SQL("ALTER TABLE {} ADD COLUMN {} TEXT;")
                    stmt = stmt.format(Identifier(self.table),
                                       Identifier(column))
                    # print stmt.as_string(cursor)
                    cursor.execute(stmt)
                    stmt = SQL("COMMENT ON COLUMN {}.{} IS {};")
                    stmt = stmt.format(Identifier(self.table),
                                       Identifier(column),
                                       Literal(label))
                    cursor.execute(stmt)
        self.conn.commit()

    def load(self, fh, delimiter):
        """Bulk-load CSV rows from file handle *fh* with COPY ... FROM STDIN."""
        with self.conn.cursor() as cursor:
            headers = list(self.headers.keys())
            stmt = SQL("COPY {} ({}) FROM STDIN "
                       "WITH CSV HEADER DELIMITER AS {} NULL AS ''")
            columns = Composed([Identifier(c) for c in headers])
            columns = columns.join(', ')
            stmt = stmt.format(Identifier(self.table),
                               columns,
                               Literal(delimiter))
            # NOTE(review): debug print left enabled here while the other
            # methods have theirs commented out — confirm intentional.
            print(stmt.as_string(cursor))
            cursor.copy_expert(stmt, fh)
        self.conn.commit()
| 39.075
| 71
| 0.519834
| 323
| 3,126
| 4.95356
| 0.26935
| 0.04
| 0.04375
| 0.075
| 0.326875
| 0.326875
| 0.271875
| 0.24
| 0.17875
| 0.1325
| 0
| 0.002064
| 0.380038
| 3,126
| 79
| 72
| 39.56962
| 0.823529
| 0.047665
| 0
| 0.227273
| 0
| 0
| 0.094581
| 0.008751
| 0
| 0
| 0
| 0
| 0
| 1
| 0.075758
| false
| 0
| 0.075758
| 0
| 0.181818
| 0.015152
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34ebcfd140d8b8342551373bb548dcc3e38235a3
| 11,609
|
py
|
Python
|
mixer.py
|
ejhumphrey/mixer_bingo
|
d78174384e4476de70348d3e17a72d45ff04d960
|
[
"0BSD"
] | null | null | null |
mixer.py
|
ejhumphrey/mixer_bingo
|
d78174384e4476de70348d3e17a72d45ff04d960
|
[
"0BSD"
] | null | null | null |
mixer.py
|
ejhumphrey/mixer_bingo
|
d78174384e4476de70348d3e17a72d45ff04d960
|
[
"0BSD"
] | null | null | null |
from __future__ import print_function
import argparse
import json
import jsonschema
import logging
import numpy as np
import networkx as nx
import os
import pandas as pd
import random
import sys
logger = logging.getLogger(name=__file__)
def _load_schema():
    """Load the participant JSON schema that ships next to this module.

    Returns
    -------
    dict
        The parsed JSON schema.
    """
    schema_file = os.path.join(os.path.dirname(__file__),
                               'participant_schema.json')
    # Use a context manager so the handle is closed deterministically;
    # the original `json.load(open(schema_file))` leaked the file handle.
    with open(schema_file) as fh:
        return json.load(fh)

# Loaded once at import time; shared by validate().
__SCHEMA__ = _load_schema()
def validate(participant_data):
    """Check that a number of records conforms to the expected format.

    Parameters
    ----------
    participant_data : array_like of dicts
        Collection of user records to validate.

    Returns
    -------
    is_valid : bool
        True if the provided data validates against ``__SCHEMA__``.
    """
    try:
        jsonschema.validate(participant_data, __SCHEMA__)
    except jsonschema.ValidationError as failed:
        # Keep the failure detail in the debug log for diagnosis.
        logger.debug("Schema Validation Failed: {}".format(failed))
        return False
    return True
def tokenize(records):
    """Create a token mapping from objects to integers.

    Parameters
    ----------
    records : array_like of iterables
        Collection of nested arrays.

    Returns
    -------
    enum_map : dict
        Enumeration map of objects (any hashable) to tokens (int), assigned
        in sorted order so the mapping is deterministic.
    """
    unique_items = {item for row in records for item in row}
    # sorted() accepts the set directly; no intermediate list, and a dict
    # comprehension replaces dict([...]) construction.
    return {item: token for token, item in enumerate(sorted(unique_items))}
def items_to_bitmap(records, enum_map=None):
    """Turn a collection of sparse items into a binary bitmap.

    Parameters
    ----------
    records : iterable of iterables, len=n
        Items to represent as a matrix.
    enum_map : dict, or None, len=k
        Token mapping items to ints; if None, one will be generated and
        returned alongside the bitmap.

    Returns
    -------
    bitmap : np.ndarray, shape=(n, k)
        Active items.
    enum_map : dict
        Mapping of items to integers, only returned when none was given.
    """
    return_mapping = enum_map is None
    if return_mapping:
        enum_map = tokenize(records)
    bitmap = np.zeros([len(records), len(enum_map)], dtype=bool)
    for idx, row in enumerate(records):
        for item in row:
            bitmap[idx, enum_map[item]] = True
    # BUG FIX: the original `return bitmap, enum_map if return_mapping else
    # bitmap` always returned a 2-tuple due to operator precedence, yielding
    # (bitmap, bitmap) when a mapping was supplied by the caller.
    return (bitmap, enum_map) if return_mapping else bitmap
def categorical_sample(pdf):
    """Randomly select a categorical index of a given PDF.

    Parameters
    ----------
    pdf : np.ndarray
        Non-negative weights; normalised internally before sampling.

    Returns
    -------
    int
        Index drawn proportionally to the (normalised) weights.
    """
    normalised = pdf / pdf.sum()
    draw = np.random.multinomial(1, normalised)
    return int(draw.nonzero()[0])
# Edge-weighting functions, keyed by name.  Every entry is a callable taking
# an array/scalar and returning a weight, so callers can uniformly invoke
# WEIGHTING_FUNCTIONS[name](x).
WEIGHTING_FUNCTIONS = {
    'l0': lambda x: float(np.sum(x) > 0),
    'l1': lambda x: float(np.sum(x)),
    'mean': lambda x: float(np.mean(x)),
    # BUG FIX: 'null' was the bare constant 0.0, which raised TypeError when
    # called like every other entry; make it a callable returning 0.0.
    'null': lambda x: 0.0,
    'euclidean': lambda x: np.sqrt(x),
    'norm_euclidean': lambda x: np.sqrt(x) / 3.0,
    'quadratic': lambda x: x,
    'norm_quadratic': lambda x: x / 9.0
}
def build_graph(records, forced_edges=None, null_edges=None,
                interest_func='l0', seniority_func='l0',
                combination_func=np.sum):
    """Build a weighted match graph between participants.

    Parameters
    ----------
    records : pd.DataFrame (or convertible)
        Loaded participant records; must expose `interests`, `affiliation`
        and `seniority` columns.
    forced_edges : np.ndarray, or None
        One-hot assignment matrix; no row or column can sum to more than one.
    null_edges : np.ndarray, or None
        Matches to set to zero.
    interest_func : str
        Key into WEIGHTING_FUNCTIONS, e.g. 'l1', 'l0'.
    seniority_func : str
        Key into WEIGHTING_FUNCTIONS, e.g. 'l1', 'l0'.
    combination_func : function
        Numpy functions, e.g. prod, sum, max.

    Returns
    -------
    graph : networkx.Graph
        Connected graph to be factored.
    """
    if not isinstance(records, pd.DataFrame):
        records = pd.DataFrame(records)
    interest_bitmap, interest_enum = items_to_bitmap(records.interests)
    # Coerce null / forced edges for datatype compliance.
    null_edges = ([] if null_edges is None
                  else [tuple(v) for v in null_edges])
    forced_edges = ([] if forced_edges is None
                    else [tuple(v) for v in forced_edges])
    graph = nx.Graph()
    for i, row_i in records.iterrows():
        for j, row_j in records.iterrows():
            # Skip self, excluded pairs (either direction), or shared
            # affiliation.
            skip_conditions = [i == j,
                               (i, j) in null_edges,
                               (j, i) in null_edges,
                               row_i.affiliation == row_j.affiliation]
            if any(skip_conditions):
                continue
            # Interest weighting: overlap of the two interest bit-vectors.
            interest_weight = WEIGHTING_FUNCTIONS[interest_func](
                interest_bitmap[i] * interest_bitmap[j])
            # Seniority weighting: squared seniority gap.
            seniority_weight = WEIGHTING_FUNCTIONS[seniority_func](
                (row_i.seniority - row_j.seniority) ** 2.0)
            if (i, j) in forced_edges or (j, i) in forced_edges:
                # Forced pairs get an overwhelming weight so the matcher
                # always keeps them.
                weights = [2.0 ** 32]
            else:
                weights = [interest_weight, seniority_weight]
            graph.add_weighted_edges_from([(i, j, combination_func(weights))])
    return graph
def harmonic_mean(values):
    """Return the n-th root of the product of *values*.

    NOTE(review): despite its name, this computes the *geometric* mean
    (prod(values) ** (1/n)), not the harmonic mean.  Behaviour is kept
    as-is in case callers rely on it — confirm the intended statistic.
    """
    exponent = 1.0 / len(values)
    return np.power(np.prod(values), exponent)
def select_matches(records, k_matches=5, forced_edges=None, null_edges=None,
                   interest_func='l0', seniority_func='l0',
                   combination_func=np.sum, seed=None):
    """Pick affinity matches, and back-fill randomly if under-populated.

    Parameters
    ----------
    records : pd.DataFrame (or convertible)
        Participant records (see build_graph).
    k_matches : int
        Number of matches to produce per participant.
    forced_edges, null_edges : iterable of pairs, or None
        Pairs to force into / exclude from the matching.
    interest_func, seniority_func, combination_func
        Weighting configuration (see NOTE below).
    seed : int or None
        Seed for the random back-fill.

    Returns
    -------
    matches : dict
        Participant index -> set of matched participant indices.
    """
    null_edges = ([] if null_edges is None
                  else [tuple(v) for v in null_edges])
    forced_edges = ([] if forced_edges is None
                    else [tuple(v) for v in forced_edges])
    matches = {i: set() for i in range(len(records))}
    for k in range(k_matches):
        # NOTE(review): the weighting arguments passed here are hard-coded,
        # ignoring the interest_func / seniority_func / combination_func
        # parameters — confirm whether that is intentional.
        graph = build_graph(
            records, null_edges=null_edges, forced_edges=forced_edges,
            seniority_func='quadratic', interest_func='mean',
            combination_func=np.mean)
        forced_edges = None  # only honour forced pairs in the first round
        links = nx.max_weight_matching(graph)
        for row, col in links.items():
            # BUG FIX: `null_edges += (row, col)` extended the list with the
            # two bare indices instead of appending the edge pair, so matched
            # pairs were never actually excluded from later rounds.
            null_edges += [(row, col)]
            matches[row].add(col)
    catch_count = 0
    rng = np.random.RandomState(seed=seed)
    for row in matches:
        possible_matches = set(range(len(records)))
        possible_matches = possible_matches.difference(matches[row])
        while len(matches[row]) != k_matches:
            # BUG FIX: np.asarray(set) produces a 0-d object array which
            # RandomState.choice rejects; draw from a sorted list instead
            # (sorting keeps the draw deterministic for a given seed).
            col = rng.choice(sorted(possible_matches))
            matches[row].add(col)
            null_edges += [(row, col)]
            catch_count += 1
    logger.debug("backfilled %d" % catch_count)
    return matches
def select_topic(row_a, row_b):
    """Pick one interest topic shared by two participant rows.

    Parses the interests field (index 7) of both rows, intersects them,
    and returns one common topic uniformly at random; implicitly returns
    None when there is no overlap.

    NOTE(review): ``parse_interests`` is not defined in this module —
    confirm it is provided elsewhere, otherwise this raises NameError.
    """
    topics_a = parse_interests(row_a[7])
    topics_b = parse_interests(row_b[7])
    topics = list(set(topics_a).intersection(set(topics_b)))
    if topics:
        # Uniform draw over the shared topics.
        return topics[categorical_sample(np.ones(len(topics)))]
# Prompt templates paired with the category whose random value fills them
# (see generate_text).
TEXT_FMTS = [
    ("Find someone from %s.", 'affiliation'),
    ("Find someone currently located in %s.", 'country'),
    ("Find someone who is an expert on %s", 'topics'),
    ("Find someone in academia at the %s level", 'education')]
# Static prompts included on every card.
TEXT = [
    "Find someone who works in industry",
    "Introduce someone to someone else",
    "Help someone solve a square",
    "Find someone who plays an instrument.",
    "Find someone who has attended ISMIR for more than 5 years",
    "Find someone for which this is their first ISMIR"]
def generate_text(rows, target_idx, matches, num_outputs=24):
    """Assemble bingo-cell texts for one participant.

    Starts with "Talk to <name>" prompts for the participant's matches,
    adds the static TEXT prompts, then pads with randomly filled TEXT_FMTS
    templates until *num_outputs* entries exist.

    NOTE(review): get_affilations / get_education / get_topics /
    get_countries are not defined in this module — confirm they are
    provided elsewhere, otherwise this raises NameError.
    """
    outputs = []
    for match_idx in matches[target_idx]:
        # rows[match_idx][1] is presumably the participant name —
        # TODO confirm against the row schema.
        outputs.append("Talk to %s" % rows[match_idx][1])
    outputs.extend(TEXT)
    categories = {
        'affiliation': get_affilations(rows),
        'education': get_education(rows),
        'topics': get_topics(rows),
        'country': get_countries(rows)
    }
    while len(outputs) < num_outputs:
        fmt, key = random.choice(TEXT_FMTS)
        value = random.choice(categories[key])
        outputs.append(fmt % value)
    return outputs
def make_card(name, contents, outfile):
    """Write a 5x5 bingo card for *name* as a standalone LaTeX document.

    Parameters
    ----------
    name : str
        Participant name printed on the card header.
    contents : list of str
        At least 24 cell texts; 24 are placed in random order around a
        central 'FREE' square.
    outfile : str
        Path of the .tex file to write.
    """
    tex_lines = []
    tex_lines.append(r'\documentclass[10pt, a4paper]{article}')
    tex_lines.append(r'\usepackage{tikz}')
    tex_lines.append(r'\usepackage{fullpage}')
    tex_lines.append(r'\usetikzlibrary{positioning,matrix}')
    tex_lines.append(r'\renewcommand*{\familydefault}{\sfdefault}')
    tex_lines.append(r'\usepackage{array}')
    tex_lines.append(r'\begin{document}')
    tex_lines.append(r'\pagestyle{empty}')
    tex_lines.append(r'\begin{center}')
    tex_lines.append(r'\Huge ISMIR 2014 Mixer Bingo\\')
    tex_lines.append(r"\bigskip \huge \emph{%s} \\" % name)
    tex_lines.append(r'\normalsize')
    tex_lines.append(r'')
    tex_lines.append(r'\bigskip')
    # BUG FIX: shuffle a copy — the original shuffled `contents` in place,
    # mutating the caller's list.
    pool = list(contents)
    random.shuffle(pool)
    c = pool[0:12] + [r'FREE'] + pool[12:24]
    tex_lines.append(r'\begin{tikzpicture}')
    tex_lines.append(r"""\tikzset{square matrix/.style={
matrix of nodes,
column sep=-\pgflinewidth, row sep=-\pgflinewidth,
nodes={draw,
text height=#1/2-2.5em,
text depth=#1/2+2.5em,
text width=#1,
align=center,
inner sep=0pt
},
},
square matrix/.default=3.2cm
}""")
    tex_lines.append(r'\matrix [square matrix]')
    tex_lines.append(r'(shi)')
    tex_lines.append(r'{')
    # One line per card row (5 cells each).
    for row in range(5):
        cells = tuple(c[5 * row:5 * row + 5])
        tex_lines.append(r"%s & %s & %s & %s & %s\\" % cells)
    tex_lines.append(r'};')
    # Grid lines: vertical (east/west) then horizontal (north/south) rules.
    draw_templates = [
        r'\draw[line width=2pt] (shi-1-\i.north east) -- (shi-5-\i.south east);',
        r'\draw[line width=2pt] (shi-1-\i.north west) -- (shi-5-\i.south west);',
        r'\draw[line width=2pt] (shi-\i-1.north west) -- (shi-\i-5.north east);',
        r'\draw[line width=2pt] (shi-\i-1.south west) -- (shi-\i-5.south east);',
    ]
    for draw in draw_templates:
        tex_lines.append(r'\foreach \i in {1,2,3,4,5}')
        tex_lines.append(draw)
    tex_lines.append(r'\end{tikzpicture}')
    tex_lines.append('')
    tex_lines.append(r'\pagebreak')
    tex_lines.append('')
    tex_lines.append(r'\end{center}')
    tex_lines.append(r'\end{document}')
    with open(outfile, 'w') as f:
        for line in tex_lines:
            f.write("%s\n" % line)
if __name__ == '__main__':
    # CLI stub: arguments are parsed but nothing is executed yet —
    # the script exits immediately with status 0.
    logging.basicConfig(level=logging.DEBUG)
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('a',
                        help='writeme')
    parser.add_argument('--b', type=str,
                        default='apple',
                        help='writeme')
    parser.add_argument('--verbose', action='store_true',
                        help='Print progress to the console.')
    args = parser.parse_args()
    sys.exit(0)
| 27.839329
| 81
| 0.593591
| 1,540
| 11,609
| 4.327922
| 0.246104
| 0.049212
| 0.08192
| 0.083271
| 0.221005
| 0.147937
| 0.131283
| 0.122581
| 0.113578
| 0.113578
| 0
| 0.015862
| 0.266862
| 11,609
| 416
| 82
| 27.90625
| 0.767242
| 0.160221
| 0
| 0.135371
| 0
| 0.017467
| 0.199315
| 0.015192
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048035
| false
| 0
| 0.048035
| 0
| 0.139738
| 0.004367
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34f1d3eabe979cf0b88b9e39d396c23d11f9013e
| 9,487
|
py
|
Python
|
bouncingball_cortix.py
|
seamuss1/BouncingBall
|
6c4ff0838fa0366798efd8922c2632a8bfa5f15b
|
[
"MIT"
] | 1
|
2019-08-11T23:55:05.000Z
|
2019-08-11T23:55:05.000Z
|
bouncingball_cortix.py
|
seamuss1/BouncingBall
|
6c4ff0838fa0366798efd8922c2632a8bfa5f15b
|
[
"MIT"
] | null | null | null |
bouncingball_cortix.py
|
seamuss1/BouncingBall
|
6c4ff0838fa0366798efd8922c2632a8bfa5f15b
|
[
"MIT"
] | null | null | null |
import os, time, datetime, threading, random
import numpy as np
import matplotlib.pyplot as plt
import sys
from cortix.src.module import Module
from cortix.src.port import Port
from cortix.src.cortix_main import Cortix
import time
from bb_plot import Plot
import shapely.geometry as geo
import shapely.ops
from shapely import affinity
class BouncingBall(Module):
    """Cortix module simulating one ball bouncing inside a polygon.

    State (position, velocity, footprint, collisions) is exchanged with
    peer balls and a plotting module through Messenger payloads sent over
    the module's ports each timestep.
    """

    def __init__(self, shape=None, runtime=10):
        # shape: shapely Polygon boundary; runtime: simulated seconds.
        super().__init__()
        self.shape = shape
        self.runtime = runtime
        self.bndry = []
        coords = list(self.shape.exterior.coords)
        # Parse the box (LinearRing) to create a list of line obstacles.
        for c, f in enumerate(coords):
            try:
                cr = geo.LineString([coords[c], coords[c + 1]])
            except IndexError:
                # Last vertex: close the ring back to the final coordinate.
                cr = geo.LineString([coords[c], coords[-1]])
                break
            self.bndry.append(cr)
        bn = self.shape.bounds
        self.r = 1.0  # ball radius
        for i in range(100):  # Attempt to spawn ball within boundary
            self.p0 = [random.uniform(bn[0], bn[2]), random.uniform(bn[1], bn[3])]
            self.pnt = geo.point.Point(self.p0[0], self.p0[1])
            self.circle = self.pnt.buffer(self.r)
            if self.shape.contains(self.circle):
                break
        self.v0 = [random.uniform(-50, 50), random.uniform(-30, 30)]  # initial velocity
        self.cor = 0.95  # coefficient of restitution for wall bounces
        self.a = (0, -9.81)  # constant acceleration (gravity)
        self.m = 1  # mass
        self.KE = 0.5 * self.m * ((self.v0[0] ** 2 + self.v0[1] ** 2) ** 0.5) ** 2
        # Creation time doubles as this ball's unique id in messages.
        self.timestamp = str(datetime.datetime.now())
        # Customize container class that is sent to other modules.
        self.messenger = Messenger()
        self.messenger.circle = self.circle
        self.messenger.timestamp = self.timestamp
        self.messenger.m, self.messenger.r = 1, 1
        self.messenger.v = self.v0
        self.messenger.p = self.p0

    def run(self):
        """Step the simulation for self.runtime seconds at 10 ms resolution."""
        t = 0.01  # timestep in seconds
        self.elapsed, oe = 0, 0
        its = round(self.runtime / t)
        portdic = dict()  # latest Messenger received per peer port name
        for i in self.ports:  # Send initial properties
            if 'plot' not in str(i):
                self.send(self.messenger, i)
        for i in self.ports:
            if 'plot' not in str(i):
                portdic[str(i)] = self.recv(i)
        for i in range(its):
            self.elapsed += t
            # Progress report once per simulated second.
            if oe != int(self.elapsed):
                print('Time Elapsed: ', int(self.elapsed), 'seconds\nVelocity: ', str(round(self.v0[0], 2)) + 'i +' + str(round(self.v0[1], 2)) + 'j')
                oe = int(self.elapsed)
            # Gravity calculations for timestep (constant-acceleration update).
            self.p0[1] = 0.5 * self.a[1] * t ** 2 + self.v0[1] * t + self.p0[1]
            self.p0[0] = 0.5 * self.a[0] * t ** 2 + self.v0[0] * t + self.p0[0]
            self.v0[1] = self.a[1] * t + self.v0[1]
            self.v0[0] = self.a[0] * t + self.v0[0]
            # Update position and velocity variables.
            self.pnt = geo.point.Point(self.p0[0], self.p0[1])
            self.circle = self.pnt.buffer(self.r)
            self.messenger.v = self.v0
            for shape in self.bndry:  # Detects collision with boundary
                if self.circle.crosses(shape) or self.circle.touches(shape) or self.circle.intersects(shape):
                    self.wall_collision(shape)
            for name in portdic:  # Detects collision with other objects
                messenger = portdic[name]
                shape = portdic[name].circle
                ts = portdic[name].timestamp
                # Collisions the peer detected but we did not, received as a message.
                for line in portdic[name].collision:
                    if self.timestamp == line:
                        self.ball_collision(messenger)
                        if self.circle.crosses(shape) or self.circle.touches(shape) or self.circle.intersects(shape):
                            self.ball_shift(shape)
                # Reacts to intersection between this object and another.
                if self.circle.crosses(shape) or self.circle.touches(shape) or self.circle.intersects(shape):
                    self.ball_collision(messenger)
                    self.ball_shift(shape)
                    self.messenger.collision.append(ts)
            self.messenger.circle = self.circle
            self.messenger.p = self.p0
            for i in self.ports:  # Send and receive messages for each timestep
                self.send(self.messenger, i)
            for i in self.ports:
                if 'plot' in str(i):  # Not receiving messages from plotting
                    continue
                messenger = self.recv(i)
                portdic[str(i)] = messenger
            self.messenger.collision = []  # Reset list of collisions
        for i in self.ports:  # Send 'done' string to plot module as end condition
            if 'plot' in str(i):
                self.send('done', i)
        print('Time Elapsed: ', self.elapsed, 'seconds\nVelocity: ', str(round(self.v0[0], 2)) + 'i +' + str(round(self.v0[1], 2)) + 'j')
        print('done')
        return

    def wall_collision(self, shape):
        """Reflect the ball's velocity off boundary line *shape*.

        Pushes the ball back out of the wall, then mirrors the velocity's
        wall-normal component, damped by the coefficient of restitution.
        """
        p1, p2 = shapely.ops.nearest_points(self.pnt, shape)
        angle3 = np.arctan2(p2.y - p1.y, p2.x - p1.x)  # direction to the wall
        d = shape.distance(self.pnt)
        # Undo the penetration (the ball overlaps the wall by r - d).
        self.p0 = [self.p0[0] - (self.r - d) * np.cos(angle3), self.p0[1] - (self.r - d) * np.sin(angle3)]
        # NOTE(review): self.pnt is not rebuilt from the corrected p0 before
        # buffering here — confirm whether that is intentional.
        self.circle = self.pnt.buffer(self.r)
        angle2 = np.arctan2(self.v0[1], self.v0[0])  # current heading
        v = (self.v0[0] ** 2 + self.v0[1] ** 2) ** 0.5  # speed
        theta = angle2 - angle3
        # Decompose the speed into wall-tangential / wall-normal parts.
        vbi, vbj = v * np.sin(theta), v * np.cos(theta)
        vbj = -vbj * self.cor  # reverse and damp the normal component
        v = (vbi ** 2 + vbj ** 2) ** 0.5
        angle4 = np.arctan2(vbj, vbi)
        angle1 = angle4 - angle3
        self.v0 = [np.sin(angle1) * v, np.cos(angle1) * v]

    def ball_shift(self, shape):
        """Nudge this ball just outside *shape* so the two no longer overlap."""
        p1, p2 = shapely.ops.nearest_points(self.pnt, shape)
        angle = np.arctan2(p2.y - p1.y, p2.x - p1.x)
        d = shape.distance(self.pnt)
        # 1.01 factor leaves a small gap to avoid immediate re-collision.
        self.p0 = [self.p0[0] - (self.r * 1.01 - d) * np.cos(angle), self.p0[1] - (self.r * 1.01 - d) * np.sin(angle)]
        self.pnt = geo.point.Point(self.p0[0], self.p0[1])
        self.circle = self.pnt.buffer(self.r)

    def ball_collision(self, messenger):
        """Update this ball's velocity after an elastic hit with a peer ball."""
        shape = messenger.circle
        v2, m = messenger.v, messenger.m
        v3 = (v2[0] ** 2 + v2[1] ** 2) ** 0.5  # peer speed
        phi = np.arctan2(v2[1], v2[0])  # peer heading
        p1, p2 = shapely.ops.nearest_points(self.pnt, shape)
        angle = np.arctan2(p2.y - p1.y, p2.x - p1.x)  # contact direction
        angle2 = np.arctan2(self.v0[1], self.v0[0])
        v = (self.v0[0] ** 2 + self.v0[1] ** 2) ** 0.5
        # Equation source: https://en.wikipedia.org/wiki/Elastic_collision
        vpx = ((v * np.cos(angle2 - angle) * (self.m - m) + 2 * m * v3 * np.cos(phi - angle)) / (self.m + m)) * np.cos(angle) + v * np.sin(angle2 - angle) * np.cos(angle + np.pi / 2)
        vpy = ((v * np.cos(angle2 - angle) * (self.m - m) + 2 * m * v3 * np.cos(phi - angle)) / (self.m + m)) * np.sin(angle) + v * np.sin(angle2 - angle) * np.sin(angle + np.pi / 2)
        vp = (vpx ** 2 + vpy ** 2) ** 0.5
        self.v0 = [vpx, vpy]
        print('Ball collision')
class Messenger:
    """Plain data container exchanged between modules over Cortix ports.

    Fields: circle (ball footprint geometry or None), collision (list of
    peer timestamps hit since the last send), timestamp (owning ball's id),
    m/r (mass, radius), v/p (velocity, position vectors).
    """

    def __init__(self, circle=None, collision=None, timestamp='0'):
        self.circle = circle
        # BUG FIX: the original default `collision=[]` was a shared mutable
        # default, so every Messenger created without an explicit list
        # aliased the same list object.
        self.collision = [] if collision is None else collision
        self.timestamp = timestamp
        self.m = 1
        self.r = 1
        self.v = []
        self.p = []
# Example driver script: prompt for shape / ball count / duration, wire the
# ball modules to each other and to the plotter, then run the simulation.
if __name__ == '__main__':
    cortix = Cortix(use_mpi=False)
    mod_list = []
    shapes = ['triangle', 'squares', 'diamond']
    while True:
        print('Choose a shape: 1) Triangle, 2) Square, or 3) Diamond\n')
        shape = input('>>>')
        shape = shape.lower()
        if shape == 'triangle' or shape == '1':
            shape = geo.Polygon([(0, 0), (0, 60), (30, 30)])
            break
        if shape == 'square' or shape == '2':
            shape = geo.box(-30, 0, 30, 50)
            break
        # BUG FIX: this branch compared against 'triangle' again, so typing
        # 'diamond' was never recognized.
        if shape == 'diamond' or shape == '3':
            # Diamond = square rotated 45 degrees.
            shape = geo.box(-30, 0, 30, 50)
            shape = affinity.rotate(shape, 45)
            break
        print('Input not recognized, try again')
    while True:
        print('Choose the number of Bouncing Balls\n')
        balls = input('>>>')
        try:
            balls = int(balls)
            if balls > 1000:
                print('Wow good luck')
            elif balls > 0:
                break
            else:
                print('Choose a better number')
        except ValueError:  # narrowed from bare except: int() raises ValueError
            print('Entry invalid')
    while True:
        print('How many seconds is the simulation?\n')
        secs = input('>>>')
        try:
            secs = int(secs)
            if secs > 50000:
                print('Wow good luck')
            elif secs > 0:
                break
            else:
                print('Choose a better number')
        except ValueError:
            print('Entry invalid')
    plot = Plot(shape=shape, length=balls)
    cortix.add_module(plot)
    for i in range(balls):
        # Stagger creation so each ball's datetime-based timestamp differs.
        time.sleep(0.01)
        app = BouncingBall(shape, runtime=secs)
        mod_list.append(app)
        cortix.add_module(app)
    for c, i in enumerate(mod_list):
        # Connect each ball to the plotter and to every other ball.
        i.connect('plot-send{}'.format(c), plot.get_port('plot-receive{}'.format(c)))
        for j in mod_list:
            if i == j:
                continue
            name = '{}{}'.format(i.timestamp, j.timestamp)
            name2 = '{}{}'.format(j.timestamp, i.timestamp)
            j.connect(name, i.get_port(name2))
    cortix.draw_network('network_graph.png')
    cortix.run()
    print('bye')
| 39.529167
| 144
| 0.54211
| 1,305
| 9,487
| 3.906513
| 0.197701
| 0.029423
| 0.013731
| 0.012946
| 0.354845
| 0.316399
| 0.283444
| 0.244998
| 0.244998
| 0.244998
| 0
| 0.041448
| 0.315906
| 9,487
| 239
| 145
| 39.694561
| 0.744068
| 0.068093
| 0
| 0.339713
| 0
| 0
| 0.055121
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028708
| false
| 0
| 0.057416
| 0
| 0.100478
| 0.07177
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34f2f00e1352060e6764c5a58765eca101cee86d
| 2,542
|
py
|
Python
|
lstm.py
|
ryubidragonfire/text-emo
|
a03a9aa0d2e055277fc63a70822816853e5a35c0
|
[
"MIT"
] | null | null | null |
lstm.py
|
ryubidragonfire/text-emo
|
a03a9aa0d2e055277fc63a70822816853e5a35c0
|
[
"MIT"
] | null | null | null |
lstm.py
|
ryubidragonfire/text-emo
|
a03a9aa0d2e055277fc63a70822816853e5a35c0
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 29 13:07:10 2016
@author: chyam
purpose: A vanila lstm model for text classification.
"""
from __future__ import print_function
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.layers import LSTM
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.cross_validation import train_test_split
import pandas as pd
from datetime import datetime
import preputils as pu
def main():
    """Load the TSV corpus, build word TF-IDF features, train/evaluate the LSTM."""
    ### Load data
    # NOTE(review): hard-coded local Windows path — adjust before running elsewhere.
    filename = "C:/git/german-emo/data/clean-data-21092016.tsv"
    df = pd.read_csv(filename, delimiter='\t'); print(df.shape)
    ### Prepare label
    y, le = pu.prep_label(df);
    ### Prepare features (word-based) -> Split data into training and test sets
    tfidf_vectorizer = TfidfVectorizer(analyzer='word', stop_words=None, ngram_range=(1,1), max_df=0.9, min_df=1)
    X_tfidf_word_11gram = tfidf_vectorizer.fit_transform(df['Text'].values.astype('U')); print(X_tfidf_word_11gram.shape); #11468x26778
    X_train, X_test, y_train, y_test = train_test_split(X_tfidf_word_11gram, y, test_size=0.3, train_size=0.7, random_state=88); del X_tfidf_word_11gram
    # Densify for the network input; free each sparse matrix as we go.
    X_train_array = X_train.toarray(); del X_train
    X_test_array = X_test.toarray(); del X_test
    ### LSTM
    nb_classes = len(le.classes_)
    # timesteps=1: each sample is a single TF-IDF vector, not a sequence.
    lstm(X_train_array, y_train, X_test_array, y_test, timesteps=1, batch_size=50, nb_epoch=2, nb_classes=nb_classes)
    ### Clean up
    del X_train_array, X_test_array, y_train, y_test
    return
def lstm(X_train, y_train, X_test, y_test, timesteps, batch_size, nb_epoch, nb_classes):
    """Build, train and evaluate a single-layer LSTM classifier.

    Parameters
    ----------
    X_train, X_test : dense arrays of shape (n_samples, n_features)
    y_train, y_test : integer class labels (sparse categorical)
    timesteps : int
        NOTE(review): accepted but never used in the body — confirm.
    batch_size, nb_epoch, nb_classes : int
        Training configuration and output dimensionality.
    """
    print('Starting LSTM ...')
    print(str(datetime.now()))
    feature_len = X_train.shape; print(feature_len[1])
    model = Sequential()
    #model.add(Embedding(max_features, 256, input_length=maxlen))
    model.add(LSTM(input_dim=feature_len[1], output_dim=128, activation='sigmoid', inner_activation='hard_sigmoid'))
    model.add(Dropout(0.5))
    model.add(Dense(nb_classes))
    model.add(Activation('sigmoid'))
    model.compile(loss='sparse_categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])
    model.fit(X_train, y_train, batch_size=batch_size, nb_epoch=nb_epoch)
    score = model.evaluate(X_test, y_test, batch_size=batch_size)
    print('Test score:', score[0])
    print('Test accuracy:', score[1])
    print('LSTM finished ...')
    print(str(datetime.now()))
    return
if __name__ == '__main__':
main()
| 35.802817
| 152
| 0.712825
| 382
| 2,542
| 4.473822
| 0.395288
| 0.031597
| 0.023406
| 0.037449
| 0.021065
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030942
| 0.160897
| 2,542
| 71
| 153
| 35.802817
| 0.770277
| 0.131393
| 0
| 0.097561
| 0
| 0
| 0.089991
| 0.035354
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04878
| false
| 0
| 0.219512
| 0
| 0.317073
| 0.243902
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34f48be78d73f96e6ef88433990d92e5dd1a350b
| 7,584
|
py
|
Python
|
python/models/model_factory.py
|
rwightman/pytorch-cdiscount
|
95901bd77888f7480f282e4b1541c0fc1e021bf9
|
[
"Apache-2.0"
] | 1
|
2022-03-09T09:40:43.000Z
|
2022-03-09T09:40:43.000Z
|
python/models/model_factory.py
|
rwightman/pytorch-cdiscount
|
95901bd77888f7480f282e4b1541c0fc1e021bf9
|
[
"Apache-2.0"
] | null | null | null |
python/models/model_factory.py
|
rwightman/pytorch-cdiscount
|
95901bd77888f7480f282e4b1541c0fc1e021bf9
|
[
"Apache-2.0"
] | null | null | null |
import torchvision.models
from .resnext101_32x4d import resnext101_32x4d
from .inception_v4 import inception_v4
from .inception_resnet_v2 import inception_resnet_v2
from .wrn50_2 import wrn50_2
from .my_densenet import densenet161, densenet121, densenet169, densenet201
from .my_resnet import resnet18, resnet34, resnet50, resnet101, resnet152
from .fbresnet200 import fbresnet200
from .dpn import dpn68, dpn68b, dpn92, dpn98, dpn131, dpn107
#from .transformed_model import TransformedModel
from .load_checkpoint import load_checkpoint
def normalizer_from_model(model_name):
    """Return the input-normalization scheme name implied by a model name.

    'inception' models use the 'le' scheme, DPN models use 'dpn', and
    everything else falls back to the torchvision defaults.
    """
    if 'inception' in model_name:
        return 'le'
    if 'dpn' in model_name:
        return 'dpn'
    return 'torchvision'
# Registry of known model configurations keyed by a public config name.
# Each entry records the factory name, classifier size, expected input
# resolution, normalization scheme, pretrained checkpoint filename, and
# whether the checkpoint's first (background) class should be dropped.
model_config_dict = {
    'resnet18': {
        'model_name': 'resnet18', 'num_classes': 1000, 'input_size': 224, 'normalizer': 'torchvision',
        'checkpoint_file': 'resnet18-5c106cde.pth', 'drop_first_class': False},
    'resnet34': {
        'model_name': 'resnet34', 'num_classes': 1000, 'input_size': 224, 'normalizer': 'torchvision',
        'checkpoint_file': 'resnet34-333f7ec4.pth', 'drop_first_class': False},
    'resnet50': {
        'model_name': 'resnet50', 'num_classes': 1000, 'input_size': 224, 'normalizer': 'torchvision',
        'checkpoint_file': 'resnet50-19c8e357.pth', 'drop_first_class': False},
    'resnet101': {
        'model_name': 'resnet101', 'num_classes': 1000, 'input_size': 224, 'normalizer': 'torchvision',
        'checkpoint_file': 'resnet101-5d3b4d8f.pth', 'drop_first_class': False},
    'resnet152': {
        'model_name': 'resnet152', 'num_classes': 1000, 'input_size': 224, 'normalizer': 'torchvision',
        'checkpoint_file': 'resnet152-b121ed2d.pth', 'drop_first_class': False},
    'densenet121': {
        'model_name': 'densenet121', 'num_classes': 1000, 'input_size': 224, 'normalizer': 'torchvision',
        'checkpoint_file': 'densenet121-241335ed.pth', 'drop_first_class': False},
    'densenet169': {
        'model_name': 'densenet169', 'num_classes': 1000, 'input_size': 224, 'normalizer': 'torchvision',
        'checkpoint_file': 'densenet169-6f0f7f60.pth', 'drop_first_class': False},
    'densenet201': {
        'model_name': 'densenet201', 'num_classes': 1000, 'input_size': 224, 'normalizer': 'torchvision',
        'checkpoint_file': 'densenet201-4c113574.pth', 'drop_first_class': False},
    'densenet161': {
        'model_name': 'densenet161', 'num_classes': 1000, 'input_size': 224, 'normalizer': 'torchvision',
        'checkpoint_file': 'densenet161-17b70270.pth', 'drop_first_class': False},
    # NOTE(review): the 'dualpathnet' normalizer string used below differs from
    # the 'dpn' string returned by normalizer_from_model() — confirm which one
    # downstream consumers expect.
    'dpn107': {
        'model_name': 'dpn107', 'num_classes': 1000, 'input_size': 299, 'normalizer': 'dualpathnet',
        'checkpoint_file': 'dpn107_extra-fc014e8ec.pth', 'drop_first_class': False},
    'dpn92_extra': {
        'model_name': 'dpn92', 'num_classes': 1000, 'input_size': 299, 'normalizer': 'dualpathnet',
        'checkpoint_file': 'dpn92_extra-1f58102b.pth', 'drop_first_class': False},
    'dpn92': {
        'model_name': 'dpn92', 'num_classes': 1000, 'input_size': 299, 'normalizer': 'dualpathnet',
        'checkpoint_file': 'dpn92-7d0f7156.pth', 'drop_first_class': False},
    'dpn68': {
        'model_name': 'dpn68', 'num_classes': 1000, 'input_size': 299, 'normalizer': 'dualpathnet',
        'checkpoint_file': 'dpn68-abcc47ae.pth', 'drop_first_class': False},
    # NOTE(review): 'dpn68b' and 'dpn68b_extra' point at the same checkpoint
    # file ('dpn68_extra.pth') — looks like a copy-paste slip; verify the
    # intended non-extra dpn68b weights file.
    'dpn68b': {
        'model_name': 'dpn68b', 'num_classes': 1000, 'input_size': 299, 'normalizer': 'dualpathnet',
        'checkpoint_file': 'dpn68_extra.pth', 'drop_first_class': False},
    'dpn68b_extra': {
        'model_name': 'dpn68b', 'num_classes': 1000, 'input_size': 299, 'normalizer': 'dualpathnet',
        'checkpoint_file': 'dpn68_extra.pth', 'drop_first_class': False},
    # Inception checkpoint includes the 1001-class ImageNet head with a
    # background class, hence drop_first_class=True.
    'inception_resnet_v2': {
        'model_name': 'inception_resnet_v2', 'num_classes': 1001, 'input_size': 299, 'normalizer': 'le',
        'checkpoint_file': 'inceptionresnetv2-d579a627.pth', 'drop_first_class': True},
}
def config_from_string(string, output_fn='log_softmax'):
    """Look up a model config by name and attach the desired output function.

    :param string: key into model_config_dict
    :param output_fn: name of the output activation to record in the config
    :return: a copy of the registry entry with 'output_fn' set
    :raises KeyError: if the name is not in model_config_dict

    Fix: the original assigned into the shared registry dict, so calling this
    twice with different output_fn values silently mutated the global
    model_config_dict entry. Returning a shallow copy avoids that.
    """
    config = dict(model_config_dict[string])
    config['output_fn'] = output_fn
    return config
def create_model(
        model_name='resnet50',
        pretrained=False,
        num_classes=1000,
        checkpoint_path='',
        **kwargs):
    """Instantiate a model architecture by name.

    DPN-family models receive only the popped `test_time_pool` option;
    every other architecture receives the remaining **kwargs, matching the
    original dispatch behavior.

    :param model_name: architecture key (e.g. 'resnet50', 'dpn92', 'wrn50')
    :param pretrained: load the factory's own pretrained weights
    :param num_classes: classifier output size
    :param checkpoint_path: optional checkpoint loaded when not pretrained
    :return: the constructed model
    :raises ValueError: for an unknown model_name. (The original used
        `assert False and "Invalid model"`, which raises AssertionError with
        no message and disappears entirely under `python -O`.)
    """
    # test_time_pool only applies to the DPN family; pop it so it is never
    # forwarded to the other factories (same as the original behavior).
    test_time_pool = kwargs.pop('test_time_pool', 0)
    dpn_factories = {
        'dpn68': dpn68, 'dpn68b': dpn68b, 'dpn92': dpn92,
        'dpn98': dpn98, 'dpn131': dpn131, 'dpn107': dpn107,
    }
    generic_factories = {
        'resnet18': resnet18, 'resnet34': resnet34, 'resnet50': resnet50,
        'resnet101': resnet101, 'resnet152': resnet152,
        'densenet121': densenet121, 'densenet161': densenet161,
        'densenet169': densenet169, 'densenet201': densenet201,
        'inception_resnet_v2': inception_resnet_v2,
        'inception_v4': inception_v4,
        'resnext101_32x4d': resnext101_32x4d,
        'wrn50': wrn50_2,
        'fbresnet200': fbresnet200,
    }
    if model_name in dpn_factories:
        model = dpn_factories[model_name](
            num_classes=num_classes, pretrained=pretrained, test_time_pool=test_time_pool)
    elif model_name in generic_factories:
        model = generic_factories[model_name](
            num_classes=num_classes, pretrained=pretrained, **kwargs)
    else:
        raise ValueError('Invalid model: ' + model_name)
    if checkpoint_path and not pretrained:
        load_checkpoint(model, checkpoint_path)
    return model
def create_model_from_cfg(mc, checkpoint_path=''):
    """Build a model from a config dict produced by config_from_string.

    :param mc: config mapping with 'model_name', 'num_classes',
        'checkpoint_file' and optionally 'kwargs'
    :param checkpoint_path: overrides mc['checkpoint_file'] when non-empty
    :return: the constructed model

    Fix: the original wrote a default 'kwargs' entry back into the caller's
    dict (`mc['kwargs'] = {}`); using .get() leaves the input untouched.
    """
    kwargs = mc.get('kwargs', {})
    model = create_model(
        model_name=mc['model_name'],
        num_classes=mc['num_classes'],
        checkpoint_path=checkpoint_path if checkpoint_path else mc['checkpoint_file'],
        **kwargs
    )
    return model
| 47.10559
| 105
| 0.681566
| 857
| 7,584
| 5.731622
| 0.115519
| 0.120114
| 0.052932
| 0.081433
| 0.552524
| 0.495318
| 0.477402
| 0.468037
| 0.468037
| 0.468037
| 0
| 0.076786
| 0.18605
| 7,584
| 160
| 106
| 47.4
| 0.718937
| 0.006197
| 0
| 0.118881
| 0
| 0
| 0.303742
| 0.037553
| 0
| 0
| 0
| 0
| 0.006993
| 1
| 0.027972
| false
| 0
| 0.06993
| 0
| 0.125874
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34f4d60dbd3b87a88dce9e6ef7fc8bd1f475fd71
| 585
|
py
|
Python
|
add_+x.py
|
racytech/rpctests
|
886d97b9e16fd030586d0fca6945d8f7a277ae27
|
[
"Apache-2.0"
] | null | null | null |
add_+x.py
|
racytech/rpctests
|
886d97b9e16fd030586d0fca6945d8f7a277ae27
|
[
"Apache-2.0"
] | null | null | null |
add_+x.py
|
racytech/rpctests
|
886d97b9e16fd030586d0fca6945d8f7a277ae27
|
[
"Apache-2.0"
] | 1
|
2021-09-03T17:14:55.000Z
|
2021-09-03T17:14:55.000Z
|
#!/usr/bin/env python3
"""
Add +x to every .sh file
"""
# import argparse
import os
import subprocess
def go_recursive(search_dir: str):
    """Recursively add the executable bit to every .sh file under search_dir.

    Skips .git directories entirely. Uses the external `chmod +x` command,
    preserving the original behavior (so +x semantics follow chmod, not a
    hard-coded mode).
    """
    objects = os.listdir(search_dir)
    for obj in objects:
        if obj == ".git":
            continue
        obj_path = f"{search_dir}/{obj}"
        if os.path.isdir(obj_path):
            go_recursive(obj_path)
        if os.path.isfile(obj_path) and obj.endswith(".sh"):
            try:
                subprocess.run(["chmod", "+x", obj_path])
            # Fix: a bare `except:` also swallowed KeyboardInterrupt and
            # SystemExit; Exception keeps the best-effort behavior without
            # hiding interpreter-level signals.
            except Exception:
                print("There is an exception")
go_recursive(".")
| 20.172414
| 60
| 0.560684
| 76
| 585
| 4.171053
| 0.565789
| 0.11041
| 0.050473
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002463
| 0.305983
| 585
| 29
| 61
| 20.172414
| 0.778325
| 0.105983
| 0
| 0
| 0
| 0
| 0.104854
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0.125
| 0
| 0.1875
| 0.0625
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34f5a27bc2eb816c9dabb375d6159c8eb8e17312
| 25,985
|
py
|
Python
|
texas.py
|
isaact23/texas
|
1ac70b00f0acf2f196aca87476d7bac97418afba
|
[
"MIT"
] | null | null | null |
texas.py
|
isaact23/texas
|
1ac70b00f0acf2f196aca87476d7bac97418afba
|
[
"MIT"
] | null | null | null |
texas.py
|
isaact23/texas
|
1ac70b00f0acf2f196aca87476d7bac97418afba
|
[
"MIT"
] | null | null | null |
# TEXAS HOLD'EM (Program by Isaac Thompson)
import random, itertools, copy, sys
import os
from playsound import playsound
import pyttsx3
# Set to true to enable betting.
do_bets = False

# Card encoding: a card is a two-character string rank+suit, e.g. '3H'.
RANKS = ['2', '3', '4', '5', '6', '7', '8', '9', 'T', 'J', 'Q', 'K', 'A']
SUITS = ['C', 'D', 'H', 'S']
# Ordinal maps used for sorting and comparing cards.
SORT_RANKS = {'2': 0, '3': 1, '4': 2, '5': 3, '6': 4, '7': 5, '8': 6, '9': 7, 'T': 8, 'J': 9, 'Q': 10, 'K': 11, 'A': 12}
SORT_SUITS = {'C': 0, 'D': 1, 'H': 2, 'S': 3}
# Spoken names for ranks and suits (used when announcing cards).
RANK_NAMES = {'2': 'Two', '3': 'Three', '4': 'Four', '5': 'Five', '6': 'Six', '7': 'Seven', '8': 'Eight', '9': 'Nine', 'T': 'Ten',
              'J': 'Jack', 'Q': 'Queen', 'K': 'King', 'A': 'Ace'}
SUIT_NAMES = {'C': 'Clubs', 'D': 'Diamonds', 'H': 'Hearts', 'S': 'Spades'}

# Canned speech lines, chosen at random for flavor at each game stage.
DEAL_IN = ["Deal me in.",
           "What are you waiting for? Give me two cards.",
           "You're the dealer. Go ahead and deal.",
           "Give me some cards please."]
FLOP = ["Time for the flop.",
        "Put down the first three cards"]
# PLAYER_SPEECH_N: commentary when the human's win chance is >=0.5 / >0.7 / >0.9.
PLAYER_SPEECH_1 = ["Not bad.",
                   "That's more than half.",
                   "The odds are in your favor.",
                   "You have an acknowledgable chance, my friend.",
                   "Just you wait. This will all change."]
PLAYER_SPEECH_2 = ["That's pretty good.",
                   "How sad.",
                   "Don't worry, the odds will change shortly.",
                   "You hear that? It's the winds of change.",
                   "I have to say I am not happy with you."]
PLAYER_SPEECH_3 = ["I might as well fold.",
                   "This is rather unfortunate.",
                   "Dang.",
                   "No. This can't be happening. No!",
                   "Welp. This is happening."]
PLAYER_WIN = ["You won this time around.",
              "You win. What a shame.",
              "You won. For the first time. For the last time.",
              "Welp, I've been destroyed.",
              "Good game.",
              "Let's play again so I can righteously win."]
# CPU_SPEECH_N: commentary when the computer's win chance is >0.5 / >0.7 / >0.9.
CPU_SPEECH_1 = ["Looks good for me.",
                "That's a good thing.",
                "Hopefully it stays that way.",
                "Flip a coin and it'll land on my side.",
                "Heh."]
CPU_SPEECH_2 = ["Prepare to lose.",
                "The odds are in my favor.",
                "Ha ha ha ha.",
                "I will be beating you shortly.",
                "I will trump you!"]
CPU_SPEECH_3 = ["You sir are doomed.",
                "You might as well fold",
                "Just give up. As far as you know I've got pocket aces",
                "Prepare yourself mentally to be obliterated",
                "This is the end for you!",
                "Ha! You can't win!",
                "You humans will never beat me!"]
CPU_WIN = ["You lose. Would you like to play again?",
           "You have been righteously destroyed.",
           "Good golly. Looks like humans are being phased out.",
           "Rest in peace.",
           "I win. Let's play again so I can win again.",
           "Victory goes to me. What a surprise.",
           "Get wrecked.",
           "You've been destroyed by a computer. How do you feel?",
           "Wow, what a loser. You should have been luckier."]
# NOTE(review): this name is probably a typo of NEUTRAL_SPEECH — tell_odds()
# references NEUTRAL_SPEECH, which is never defined; one of the two names
# needs to change for the neutral-odds branch to work.
NEURAL_SPEECH = ["Well, this is going to be a boring round.",
                 "The outlook is not great for either of us",
                 "Let's both fold on three. One, two, three. Just kidding, I never fold.",
                 "I cannot express my infinite exhilaration through my sarcastic robot voice.",
                 "Yawn."]
DRAW = ["We tied. What are the odds?",
        "Tie game. How embarassing for both of us."]

# Set up audio engine
audio_engine = pyttsx3.init()
audio_engine.setProperty('rate', 210)
# Synthesize text as speech
def say(text):
    """Print text and, best-effort, speak it through the TTS engine.

    Audio output is optional: any engine failure is swallowed so the game
    never crashes over text-to-speech.
    """
    print(text)
    try:
        audio_engine.say(text)
        audio_engine.runAndWait()
    # Fix: a bare `except:` also swallowed KeyboardInterrupt/SystemExit
    # (note sys.exit() is used elsewhere in this file); Exception keeps the
    # deliberate best-effort behavior without masking those.
    except Exception:
        pass
# Convert a card identity to a name (like 3H to Three of Hearts)
def name_card(card):
    """Translate a two-character card code (e.g. '3H') into its spoken name."""
    rank, suit = card[0], card[1]
    return "{} of {}".format(RANK_NAMES[rank], SUIT_NAMES[suit])
# Report the calculated game odds to the player.
# Includes statements of astronomical wit.
def tell_odds(prediction):
    """Announce the current win probabilities with some canned commentary.

    :param prediction: dict with 'player_win', 'computer_win' and 'draw'
        probabilities (as produced by find_winning_chances)
    """
    player_percent = str(round(prediction['player_win'] * 1000) / 10)
    player_str = "You are " + player_percent + " percent likely to win. "
    computer_percent = str(round(prediction['computer_win'] * 1000) / 10)
    computer_str = "I am " + computer_percent + " percent likely to win. "
    #draw = str(round(chances['draw'] * 1000)/10)
    if prediction['draw'] > 0.5:
        say("This is probably going to be a tie round.")
    elif prediction['player_win'] > 0.9:
        say(player_str + random.choice(PLAYER_SPEECH_3))
    elif prediction['player_win'] > 0.7:
        say(player_str + random.choice(PLAYER_SPEECH_2))
    elif prediction['player_win'] >= 0.5:
        say(player_str + random.choice(PLAYER_SPEECH_1))
    elif prediction['computer_win'] > 0.9:
        say(computer_str + random.choice(CPU_SPEECH_3))
    elif prediction['computer_win'] > 0.7:
        say(computer_str + random.choice(CPU_SPEECH_2))
    elif prediction['computer_win'] > 0.5:
        say(computer_str + random.choice(CPU_SPEECH_1))
    else:
        # Bug fix: this branch referenced NEUTRAL_SPEECH, which is never
        # defined (the constant is NEURAL_SPEECH), raising NameError whenever
        # neither side had the edge.
        say(random.choice(NEURAL_SPEECH))
# A class that runs the game.
class PredictionAlgorithm():
    """Runs the Texas Hold'em game: dealing, betting, and odds estimation.

    The physical deck is mirrored in this.deck; the human types each drawn
    card in via draw_card(). NOTE(review): methods use `this` instead of the
    conventional `self`; kept as-is to avoid churn.
    """
    def __init__(this):
        # Start at 1 so the next_round() call below flips to turn 0.
        this.turn = 1
        this.next_round() # Main setup function - executed every round
        this.player_money = 500
        this.computer_money = 500
    def next_round(this): # Reset and switch turns
        """Flip whose blind it is, clear per-round state, rebuild the deck."""
        if this.turn == 0:
            this.turn = 1
        else:
            this.turn = 0
        this.community_cards = []
        this.computer_hand = []
        this.player_hand = []
        this.player_bet = 0
        this.computer_bet = 0
        this.deck = []
        for suit in SUITS:
            for rank in RANKS:
                this.deck.append(rank + suit)
        random.shuffle(this.deck)
        this.maximum_percent_loss = round((random.random() + 0.15) * 10) / 10 # Prevents overbetting. The higher, the more aggressive.
        # NOTE(review): the randomized value above is immediately overwritten
        # by the fixed 0.5 below, so the aggressiveness factor is constant.
        this.maximum_percent_loss = 0.5
    def draw_card(this): # Allow the player to specify a card, then remove it from the deck.
        """Prompt until a valid, still-in-deck card code is typed; remove and return it."""
        card = None
        while card == None:
            card = input("Draw a card: ")
            card = card.upper()
            if len(card) != 2:
                print("Not a valid card. Must be two characters long: one for the rank, second for the suit.")
                card = None
            elif (not card[0] in RANKS) or (not card[1] in SUITS):
                print("Not a valid card. Use the format: 2H for Two of Hearts, TC for Ten of Clubs, etc.")
                card = None
            elif not card in this.deck:
                print("That card is no longer in the deck. Choose a different card.")
                card = None
        this.deck.remove(card)
        print("Drew the " + RANK_NAMES[card[0]] + " of " + SUIT_NAMES[card[1]])
        return card
    def play(this): # Go through a round until a winner is determined.
        """Run one full round: blinds, deal, flop/turn/river with betting, showdown."""
        if do_bets:
            say("You have " + str(this.player_money) + " tokens. I have " + str(this.computer_money) + " tokens.")
            #say("I am playing with an aggressiveness factor of " + str(this.maximum_percent_loss))
            if this.computer_money == 0:
                say(random.choice(PLAYER_WIN))
                sys.exit()
            elif this.computer_money == 1:
                say("You play the big blind this round.")
                this.player_bet = 2
                this.computer_bet = 1
            elif (this.turn == 1 or this.player_money == 1) and this.player_money != 0:
                say("You play the small blind this round.")
                this.player_bet = 1
                this.computer_bet = 2
            elif this.turn == 0 and this.player_money > 1:
                say("You play the big blind this round.")
                this.player_bet = 2
                this.computer_bet = 1
            else:
                # Player is out of money: computer wins the match.
                say(random.choice(CPU_WIN))
                sys.exit()
        say(random.choice(DEAL_IN))
        #CardDetector.GetComputerHand()
        for i in range(2):
            this.computer_hand.append(this.draw_card())
        result = this.bets()
        if result == 2:
            # A fold ended the round inside bets().
            return
        if do_bets:
            this.state_bets()
        say(random.choice(FLOP))
        for i in range(3):
            this.community_cards.append(this.draw_card())
        if do_bets:
            if this.computer_bet != this.computer_money and this.player_bet != this.player_money:
                result = this.bets()
                if result == 2:
                    return
            this.state_bets()
        tell_odds(this.find_winning_chances(0.35))
        say("Deal another card.")
        this.community_cards.append(this.draw_card())
        if do_bets:
            if this.computer_bet != this.computer_money and this.player_bet != this.player_money:
                result = this.bets()
                if result == 2:
                    return
            this.state_bets()
        tell_odds(this.find_winning_chances(0.4))
        say("Deal the final card.")
        this.community_cards.append(this.draw_card())
        if do_bets:
            if this.computer_bet != this.computer_money and this.player_bet != this.player_money:
                result = this.bets()
                if result == 2:
                    return
            this.state_bets()
        tell_odds(this.find_winning_chances(0.45))
        say("Alright, show me your hand.")
        for i in range(2):
            this.player_hand.append(this.draw_card())
        player_best_hand = this.find_best_hand(this.community_cards + this.player_hand)
        computer_best_hand = this.find_best_hand(this.community_cards + this.computer_hand)
        say("Your best hand was " + player_best_hand.name)
        say("My best hand was " + computer_best_hand.name)
        winner = this.winning_hand(player_best_hand, computer_best_hand)
        if winner == 0:
            say(random.choice(PLAYER_WIN))
            this.player_wins()
        elif winner == 1:
            say(random.choice(CPU_WIN))
            this.computer_wins()
        elif winner == 2:
            say(random.choice(DRAW))
    def computer_wins(this):
        """Move the player's bet to the computer and start the next round."""
        this.computer_money += this.player_bet
        this.player_money -= this.player_bet
        this.next_round()
    def player_wins(this):
        """Move the computer's bet to the player and start the next round."""
        this.player_money += this.computer_bet
        this.computer_money -= this.computer_bet
        this.next_round()
    def state_bets(this):
        """Print both sides' current bets."""
        print("You have bet " + str(this.player_bet))
        print("Computer has bet " + str(this.computer_bet))
    # Run through a betting cycle with the player.
    def bets(this):
        """Alternate player/computer betting until both have acted and matched.

        Returns 2 when either side folds (round over); returns None when
        betting simply concludes, and immediately when do_bets is False.
        """
        if do_bets:
            computer_played = False
            player_played = False
            skip_player = False
            if this.turn == 1:
                skip_player = True
            chances = None
            while True: # Betting cycle
                this.state_bets()
                if skip_player:
                    skip_player = False
                else: # Player bet
                    bet = [""] # Obtain the command
                    commands = ["bet", "raise", "call", "check", "fold"]
                    while not bet[0] in commands:
                        bet = input("Bet, Raise, Call, Check, or Fold: ").split()
                        if len(bet) == 0:
                            bet = [""]
                        else:
                            bet[0] = bet[0].lower()
                    player_played = True
                    if bet[0] == "bet" or bet[0] == "raise": # Parse the command
                        try:
                            bet[1] = int(bet[1])
                            amount = bet[1]
                            if bet[1] + this.computer_bet > this.player_money:
                                print("You only have " + str(this.player_money))
                                raise Exception() # Break out of try loop and ask for a new bet
                        except: # Get a valid bet
                            amount = -1
                            while amount < 0:
                                amount = input("How much: ")
                                try:
                                    amount = int(amount)
                                    if amount + this.computer_bet > this.player_money:
                                        amount = -1
                                        print("You only have " + str(this.player_money))
                                except:
                                    amount = -1
                        this.player_bet += amount
                    elif bet[0] == "call" or bet[0] == "check":
                        if this.player_bet < this.computer_bet: # Raise the bet to match the computer bet
                            this.player_bet = this.computer_bet
                        if this.player_bet > this.player_money: # Limit calling to the player's money
                            this.player_bet = this.player_money
                        if player_played and computer_played:
                            return
                    elif bet[0] == "fold":
                        this.computer_wins()
                        return 2
                    this.state_bets()
                # Computer bet
                # Accuracy of the odds simulation scales with how many
                # community cards are already known.
                if chances == None:
                    chances = this.find_winning_chances({0: 0.22, 3: 0.3, 4: 0.3, 5: 1}[len(this.community_cards)])
                if this.player_bet > this.computer_bet:
                    call_bet = this.player_bet # Match the player without exceeding the computer balance.\
                else:
                    call_bet = this.computer_bet
                if call_bet > this.computer_money:
                    call_bet = this.computer_money
                expected_outcomes = {'fold': this.computer_money - this.computer_bet}
                if this.computer_money - this.computer_bet + 1 > 0: # If we can raise, calculate which raise value is the best.
                    # NOTE(review): the guard uses computer_bet but the range
                    # below uses player_bet — confirm the asymmetry is intended.
                    for raise_value in range(0, this.computer_money - this.player_bet + 1):
                        expected_outcomes[raise_value] = {}
                        expected_outcomes[raise_value]['win'] = this.computer_money + this.player_bet + raise_value
                        expected_outcomes[raise_value]['loss'] = this.computer_money - this.player_bet - raise_value
                        expected_outcomes[raise_value]['draw'] = this.computer_money
                        expected_outcomes[raise_value]['expected'] = expected_outcomes[raise_value]['win'] * chances['computer_win'] + \
                            expected_outcomes[raise_value]['loss'] * chances['player_win'] + \
                            expected_outcomes[raise_value]['draw'] * chances['draw']
                computer_played = True
                best_choice = 'fold'
                best_expected_value = expected_outcomes['fold']
                for choice in expected_outcomes:
                    if not choice == 'fold':
                        if expected_outcomes[choice]['expected'] > best_expected_value:
                            #if expected_outcomes[choice]['loss'] >= this.computer_money * (1 - this.maximum_percent_loss):
                            best_choice = choice
                            best_expected_value = expected_outcomes[choice]['expected']
                if best_choice == 'fold':
                    say("I fold.")
                    this.player_wins()
                    return 2
                elif best_choice == 0: # Call/Check
                    if this.computer_bet == this.player_bet:
                        say("I check.")
                    else:
                        say("I call.")
                    this.computer_bet = call_bet
                    if player_played and computer_played:
                        return
                else: # Call and raise
                    say("I call and raise " + str(best_choice))
                    this.computer_bet = this.player_bet + best_choice
    def find_winning_chances(this, accuracy=1): # Accuracy of 1 calculates perfectly, but lower values are faster.
        """Estimate win/draw probabilities by enumerating remaining deals.

        accuracy < 1 randomly discards part of the deck first to trade
        precision for speed. Returns a dict with 'player_win',
        'computer_win' and 'draw' fractions.
        """
        # Increments for each possible final outcome, then calculated as percentages in the end.
        player_wins = 0
        computer_wins = 0
        draws = 0
        false_deck = copy.deepcopy(this.deck)
        while len(false_deck) > len(this.deck) * accuracy:
            false_deck.pop(random.randrange(0, len(false_deck)))
        community_combos = list(itertools.combinations(false_deck, 5 - len(this.community_cards)))
        for community_combo in community_combos:
            community_combo = list(community_combo)
            for card in community_combo:
                false_deck.remove(card) # Temporarily remove the community cards from the deck
            full_community_combo = community_combo + this.community_cards # Add the cards we already know so we have a set of five
            player_combos = list(itertools.combinations(false_deck, 2))
            for player_combo in player_combos: # Based on reduced deck, determine all possibilities for the player's hand and beating the computer
                player_combo = list(player_combo)
                best_player_hand = this.find_best_hand(player_combo + full_community_combo)
                best_computer_hand = this.find_best_hand(this.computer_hand + full_community_combo)
                winner = this.winning_hand(best_player_hand, best_computer_hand)
                if winner == 0:
                    player_wins += 1
                elif winner == 1:
                    computer_wins += 1
                else:
                    draws += 1
            false_deck += community_combo # Add the cards back to the deck
        total_scenarios = player_wins + computer_wins + draws
        return {'player_win': player_wins / total_scenarios,
                'computer_win': computer_wins / total_scenarios,
                'draw': draws / total_scenarios}
    def find_best_hand(this, cards): # Find the best hand out of the community cards and two personal cards.
        """Evaluate every 5-card combination of `cards` and return the strongest Hand."""
        combinations = list(itertools.combinations(cards, 5))
        best_hand = None
        for combo in combinations:
            hand = Hand()
            hand.cards = list(combo)
            hand.evaluate()
            if best_hand == None:
                best_hand = hand
            else:
                if this.winning_hand(hand, best_hand) == 0:
                    best_hand = hand
        return best_hand
    def winning_hand(this, hand0, hand1):
        """Compare two evaluated hands: 0 if hand0 wins, 1 if hand1 wins, 2 on a draw.

        Lower hand_type is stronger; ties are broken by the kickers in
        high_cards, compared in order.
        """
        if hand0.hand_type < hand1.hand_type:
            return 0
        elif hand0.hand_type > hand1.hand_type:
            return 1
        else:
            assert len(hand0.high_cards) == len(hand1.high_cards), 'These two hands have a different number of kickers: ' \
                + str(hand0.cards) + ' ' + str(hand1.cards)
            i = 0
            while i < len(hand0.high_cards):
                if SORT_RANKS[hand0.high_cards[i][0]] > SORT_RANKS[hand1.high_cards[i][0]]:
                    return 0
                elif SORT_RANKS[hand0.high_cards[i][0]] < SORT_RANKS[hand1.high_cards[i][0]]:
                    return 1
                i += 1
            return 2 # Draw
class Hand(): # 5 card hand
    """A five-card poker hand.

    After evaluate(): hand_type is 0 (royal flush) .. 9 (junk), lower is
    stronger; name is the spoken description; high_cards maps kicker
    priority (0 = most significant) to the card deciding ties.
    """
    # Initialize an empty hand.
    def __init__(this, cards=None):
        """Create a hand, optionally from an existing list of card codes.

        Fix: the original signature was `cards=[]` — a mutable default, so
        every Hand() created without arguments shared ONE list and
        append() leaked cards between instances.
        """
        this.name = None
        this.hand_type = -1
        this.high_cards = {}
        this.cards = [] if cards is None else cards
    # Add a card to the hand.
    def append(this, card):
        this.cards.append(card)
    # Determine the value of our hand.
    # Identify royal flush, straights, two-pair combos, etc.
    def evaluate(this):
        """Classify the hand, setting hand_type, name and high_cards."""
        this.cards = sorted(this.cards, key=lambda card: (SORT_RANKS[card[0]], card[1])) # Sort the cards by rank first, then by suit
        # Flush
        flush = True
        for i, card in enumerate(this.cards):
            if i == 4:
                break
            elif this.cards[i][1] != this.cards[i + 1][1]:
                flush = False
                break
        # Royal flush
        if flush:
            if this.cards[0][0] == 'T' and this.cards[1][0] == 'J' and this.cards[2][0] == 'Q' and this.cards[3][0] == 'K' and this.cards[4][0] == 'A':
                this.hand_type = 0
                this.name = 'Royal Flush'
                return
        # Straight (Search both ways from the beginning card to find straight from both directions
        straight = False
        r = 0
        l = 0
        while True:
            difference = abs(SORT_RANKS[this.cards[r + 1][0]] - SORT_RANKS[this.cards[r][0]])
            if difference == 1 or difference == 12:
                r += 1
                if r == 4:
                    break
            else:
                break
        while True:
            difference = abs(SORT_RANKS[this.cards[-l - 1][0]] - SORT_RANKS[this.cards[-l][0]])
            if difference == 1 or difference == 12:
                l += 1
                if l == 4:
                    break
            else:
                break
        if r + l == 4:
            straight = True
        # Straight flush
        if straight and flush:
            this.hand_type = 1
            if this.cards[r][0] == '4' or this.cards[r][0] == '3' or this.cards[r][0] == '2':
                this.name = 'Straight Flush (Ace-High)'
                this.high_cards[0] = this.cards[4]
            elif this.cards[4][0] == 'A' and this.cards[3][0] == '5':
                this.name = 'Straight Flush (Steel Wheel)'
                this.high_cards[0] = this.cards[r]
            else:
                this.name = 'Straight Flush (' + RANK_NAMES[this.cards[4][0]] + '-High)'
                this.high_cards[0] = this.cards[4]
            return
        # Group cards for later disambiguation
        groups = [[this.cards[0]]]
        for i, card in enumerate(this.cards):
            if i > 0:
                new_card = True
                for group in groups:
                    if group[0][0] == card[0]:
                        group.append(card)
                        new_card = False
                        break
                if new_card:
                    groups.append([card])
        groups = sorted(groups, key=lambda group: -len(group)) # Biggest groups first
        # 4 of a kind
        if len(groups[0]) == 4:
            this.hand_type = 2
            this.high_cards[0] = groups[0][0]
            this.high_cards[1] = groups[1][0]
            this.name = 'Four of a Kind'
            return
        # Full House
        if len(groups[0]) == 3 and len(groups[1]) == 2:
            this.hand_type = 3
            this.high_cards[0] = groups[0][0]
            this.high_cards[1] = groups[1][0]
            if groups[0][0][0] == 'K' and groups[0][1][0] == 'K' and groups[0][2][0] == 'K' and groups[1][0][0] == 'A' and groups[1][1][0] == 'A':
                this.name = 'Full House (Nativity)'
            else:
                this.name = 'Full House'
            return
        # Flush
        if flush:
            this.hand_type = 4
            this.high_cards[0] = groups[0][0]
            this.name = 'Flush'
            return
        # Straight - code adapted from earlier (Wheel, Sucker Straight)
        if straight:
            this.hand_type = 5
            if this.cards[r][0] == '4' or this.cards[r][0] == '3' or this.cards[r][0] == '2':
                this.name = 'Straight (Ace-High)'
                this.high_cards[0] = this.cards[4]
            elif this.cards[4][0] == 'A' and this.cards[3][0] == '5':
                this.name = 'Sucker Straight'
                this.high_cards[0] = this.cards[r]
            else:
                this.name = 'Straight (' + RANK_NAMES[this.cards[4][0]] + '-High)'
                this.high_cards[0] = this.cards[4]
            return
        # Three of a Kind
        if len(groups[0]) == 3:
            this.hand_type = 6
            this.name = 'Three of a Kind'
            this.high_cards[0] = groups[0][0] # Three of a kind
            this.high_cards[1] = groups[2][0] # High kicker
            this.high_cards[2] = groups[1][0] # Low kicker
            return
        # Two Pairs
        if len(groups[0]) == 2 and len(groups[1]) == 2:
            this.hand_type = 7
            this.name = 'Two Pairs'
            this.high_cards[0] = groups[1][0] # Highest pair
            this.high_cards[1] = groups[0][0] # Lowest pair
            this.high_cards[2] = groups[2][0] # Kicker
            return
        # Pair
        if len(groups[0]) == 2:
            this.hand_type = 8
            this.name = 'Pair'
            this.high_cards[0] = groups[0][0] # Pair
            this.high_cards[1] = groups[3][0] # High kicker
            this.high_cards[2] = groups[2][0] # Mid kicker
            this.high_cards[3] = groups[1][0] # Low kicker
            return
        # Junk
        this.hand_type = 9
        this.name = 'Junk'
        assert len(groups) == 5, "Error! We have been dealt a broken hand."
        this.high_cards[0] = groups[4][0] # Highest card
        this.high_cards[1] = groups[3][0]
        this.high_cards[2] = groups[2][0]
        this.high_cards[3] = groups[1][0]
        this.high_cards[4] = groups[0][0] # Lowest card
        return
if __name__ == "__main__":
    # Play rounds forever; the game ends via sys.exit() inside play()
    # when either side runs out of money.
    ALG = PredictionAlgorithm()
    say("Let's play Texas Hold'Em.")
    while True:
        ALG.play()
| 40.792779
| 151
| 0.516452
| 3,201
| 25,985
| 4.058419
| 0.148391
| 0.041567
| 0.027019
| 0.01401
| 0.352013
| 0.272804
| 0.217381
| 0.151797
| 0.129628
| 0.120237
| 0
| 0.025388
| 0.375486
| 25,985
| 636
| 152
| 40.856918
| 0.775142
| 0.087897
| 0
| 0.302913
| 0
| 0.005825
| 0.13822
| 0
| 0
| 0
| 0
| 0
| 0.003884
| 1
| 0.03301
| false
| 0.001942
| 0.007767
| 0.001942
| 0.097087
| 0.017476
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34f5d659040a322d337330d8a9d7b5449d63b66f
| 443
|
py
|
Python
|
no. of occurences of substring.py
|
devAmoghS/Python-Programs
|
5b8a67a2a41e0e4a844ae052b59fc22fdcdbdbf9
|
[
"MIT"
] | 1
|
2019-09-18T14:06:50.000Z
|
2019-09-18T14:06:50.000Z
|
no. of occurences of substring.py
|
devAmoghS/Python-Programs
|
5b8a67a2a41e0e4a844ae052b59fc22fdcdbdbf9
|
[
"MIT"
] | null | null | null |
no. of occurences of substring.py
|
devAmoghS/Python-Programs
|
5b8a67a2a41e0e4a844ae052b59fc22fdcdbdbf9
|
[
"MIT"
] | null | null | null |
"""
Count (possibly overlapping) occurrences of a substring.
Example inputs: text="preeni", pattern="e"
"""
text = input("enter the string:")
pattern = input("enter the substring:")
matches = 0
for _ in range(len(text)):
    hit = text.find(pattern)
    if matches == 0:
        # Report where the first occurrence sits (or -1 when absent).
        print("m=%d" % hit)
        if hit == -1:
            print("no such substring is available")
            break
    if hit == -1:
        break
    matches += 1
    # Resume searching just past the previous hit so overlaps count too.
    text = text[hit + 1:]
print("no. of occurences is %s" % matches)
| 17.038462
| 48
| 0.465011
| 72
| 443
| 2.861111
| 0.458333
| 0.029126
| 0.126214
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024221
| 0.34763
| 443
| 25
| 49
| 17.72
| 0.688581
| 0.117381
| 0
| 0.125
| 0
| 0
| 0.264045
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.1875
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34f78347f261274809e3a7533c6bc409939bf9b0
| 1,039
|
py
|
Python
|
leetcode/code/maximumProduct.py
|
exchris/Pythonlearn
|
174f38a86cf1c85d6fc099005aab3568e7549cd0
|
[
"MIT"
] | null | null | null |
leetcode/code/maximumProduct.py
|
exchris/Pythonlearn
|
174f38a86cf1c85d6fc099005aab3568e7549cd0
|
[
"MIT"
] | 1
|
2018-11-27T09:58:54.000Z
|
2018-11-27T09:58:54.000Z
|
leetcode/code/maximumProduct.py
|
exchris/pythonlearn
|
174f38a86cf1c85d6fc099005aab3568e7549cd0
|
[
"MIT"
] | null | null | null |
#!/bin/usr/python
# -*- coding:utf-8 -*-
# 628.三个数的最大乘积
class Solution:
    """LeetCode 628: maximum product of three numbers."""
    def maximumProduct(self, nums):
        """Return the largest product of any three numbers in nums.

        Returns None when fewer than three numbers are given (interface
        preserved from the original).

        The answer is either the product of the three largest values, or the
        product of the two smallest (possibly very negative) values and the
        largest. Fix: the original positive/negative bucketing raised
        IndexError on all-negative inputs (e.g. [-5,-4,-3,-2,-1]) and on
        mixes such as [3, 2, -1, -1]; sorting handles every case.
        """
        if len(nums) < 3:
            return None
        ordered = sorted(nums)
        return max(ordered[-1] * ordered[-2] * ordered[-3],
                   ordered[0] * ordered[1] * ordered[-1])
# Quick smoke test of the solution on a sample input (prints 720).
s = Solution()
num = s.maximumProduct([-4, -3, -2, -1, 60])
print(num)
| 25.341463
| 53
| 0.405197
| 137
| 1,039
| 2.927007
| 0.313869
| 0.119701
| 0.049875
| 0.069825
| 0.054863
| 0.054863
| 0
| 0
| 0
| 0
| 0
| 0.075
| 0.46102
| 1,039
| 40
| 54
| 25.975
| 0.641071
| 0.057748
| 0
| 0.096774
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032258
| false
| 0
| 0
| 0
| 0.225806
| 0.032258
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
34f90454724956c5a7e90a92e40de7bf13365c40
| 20,610
|
py
|
Python
|
src/hr_system/hr_system.py
|
pablomarcel/HR-System
|
25edf82d0f4f37ededfb6c6b713a5d7c455ff67e
|
[
"MIT"
] | null | null | null |
src/hr_system/hr_system.py
|
pablomarcel/HR-System
|
25edf82d0f4f37ededfb6c6b713a5d7c455ff67e
|
[
"MIT"
] | null | null | null |
src/hr_system/hr_system.py
|
pablomarcel/HR-System
|
25edf82d0f4f37ededfb6c6b713a5d7c455ff67e
|
[
"MIT"
] | null | null | null |
import sys
import pyfiglet
import pandas as pd
import numpy as np
from tabulate import tabulate
import dateutil
import datetime
import re
# Render the application banner in ASCII art ("h r s y s t e m", slant font).
result = pyfiglet.figlet_format("h r s y s t e m", font="slant")
# Menu-loop status string — presumably updated elsewhere in this module
# (not visible in this chunk); confirm before removing.
strStatus = ""
class UserSelection:
    """Dispatches menu choices to handler methods.

    Emulates a case/switch construct: choice "N" maps to method case_N.
    """

    def switch(self, strChoice):
        """Resolve ``case_<strChoice>`` and invoke it.

        Unknown choices fall back to a lambda returning a message.
        """
        fallback = "Incorrect Selection"
        handler = getattr(self, "case_" + str(strChoice), lambda: fallback)
        return handler()

    def case_1(self):
        """Print a list of all employees."""
        IO.print_all_employees(IO.get_employee_db())

    def case_2(self):
        """Print a list of employees currently employed."""
        IO.print_all_employees_employed(IO.get_employee_db())

    def case_3(self):
        """Print a list of employees who left in the past month."""
        IO.print_employees_departures(IO.get_employee_db())

    def case_4(self):
        """Display reminders to schedule annual reviews."""
        IO.print_review_reminders(IO.get_employee_db())

    def case_5(self):
        """Capture a new employee record and persist it to the csv."""
        captured = IO.capture_employee_data(IO.get_employee_db())
        (employeeID, firstName, lastName, fullName, address,
         ssn, dateOfBirth, jobTitle, startDate, endDate) = captured
        updated = Processor.append_row(
            IO.get_employee_db(), employeeID, firstName, lastName,
            fullName, address, ssn, dateOfBirth, jobTitle,
            startDate, endDate)
        Processor.append_to_csv(updated)

    def case_6(self):
        """Delete the record matching a user-supplied full name."""
        fullName = IO.input_name_to_delete()
        Processor.delete_record(IO.get_employee_db(), fullName)

    def case_7(self):
        """Exit the program."""
        print("Goodbye ")
        sys.exit()
class Processor:
    """Performs processing tasks: csv persistence, id generation, validation."""

    # Pre-compiled SSN pattern (credit: avanitrachhadiya2155). Rejects
    # area numbers 666/000/9xx, group 00, and serial 0000.
    _SSN_REGEX = re.compile(r"^(?!666|000|9\d{2})\d{3}-(?!00)\d{2}-(?!0{4})\d{4}$")

    @staticmethod
    def delete_record(dframe, name):
        """Filter out the record matching *name* and rewrite the csv.

        :param dframe: (Pandas DataFrame) DataFrame containing employee information
        :param name: (String) full name of the record to delete
        :return: nothing
        """
        filtered = dframe[dframe.FullName != name].copy()
        Processor.update_csv(filtered)

    @staticmethod
    def update_csv(dframe):
        """Write the filtered DataFrame back to the csv file (delete path).

        :param dframe: (Pandas DataFrame) DataFrame containing employee information
        :return: nothing
        """
        dframe.to_csv("EmployeeData.csv", index=False)

    @staticmethod
    def generate_employee_id(dframe):
        """Generate a unique id for the next employee to be added.

        :param dframe: (Pandas DataFrame) DataFrame containing employee information
        :return: (Integer) next id; 1 for an empty table (the original
            returned NaN + 1 in that case, corrupting the csv)
        """
        if dframe.empty:
            return 1
        return int(dframe["EmployeeID"].max()) + 1

    @staticmethod
    def append_row(
        df, id, first, last, full, address, ssn, dob, job, startDate, endDate
    ):
        """Append one employee record and return the new DataFrame.

        :param df: (Pandas DataFrame) DataFrame containing employee information
        :param id: (Integer) employee id
        :param first: (String) first name
        :param last: (String) last name
        :param full: (String) full name
        :param address: (String) address
        :param ssn: (String) social security number
        :param dob: (String) date of birth
        :param job: (String) job title
        :param startDate: (String) start date
        :param endDate: (String) end date (or "NONE" for current employees)
        :return df: (Pandas DataFrame) new DataFrame to be written to the csv
        """
        new_row = {
            "EmployeeID": id,
            "FirstName": first,
            "LastName": last,
            "FullName": full,
            "Address": address,
            "ssn": ssn,
            "DateOfBirth": dob,
            "JobTitle": job,
            "StartDate": startDate,
            "EndDate": endDate,
        }
        # DataFrame.append was deprecated in pandas 1.4 and removed in
        # 2.0; pd.concat is the forward-compatible equivalent.
        df = pd.concat([df, pd.DataFrame([new_row])], ignore_index=True)
        return df

    @staticmethod
    def append_to_csv(df):
        """Write the DataFrame to the csv file (add-record path).

        :param df: (Pandas DataFrame) DataFrame containing employee information
        :return: nothing
        """
        df.to_csv("EmployeeData.csv", index=False)

    @staticmethod
    def isValidSSN(candidate):
        """Validate the social security number format (e.g. 123-45-6789).

        :param candidate: (String) the SSN string, or None
        :return: (Boolean) True when the string matches the SSN format

        The parameter was renamed from ``str``, which shadowed the builtin.
        """
        if candidate is None:
            return False
        return Processor._SSN_REGEX.search(candidate) is not None
class IO:
    """Performs Input and Output tasks: menu display, user prompts,
    csv loading, and tabulated report printing.
    """

    @staticmethod
    def get_menu(argument):
        """Return the menu line for option *argument* via a dict dispatch.

        :param argument: (Integer) menu option number, 1-7
        :return: (String) the menu text, or "Invalid Selection"
        """
        def one():
            return "1) Print a list of all employees"
        def two():
            return "2) Print a list of employees currently employed"
        def three():
            return "3) Print a list of employees who have left in the past month"
        def four():
            return "4) Display reminder to schedule annual review"
        def five():
            return "5) Capture employee information"
        def six():
            return "6) Delete record"
        def seven():
            return "7) Exit"
        # All seven functions are invoked eagerly when building the dict;
        # get() then simply looks up the precomputed string.
        switcher = {
            1: one(),
            2: two(),
            3: three(),
            4: four(),
            5: five(),
            6: six(),
            7: seven(),
        }
        return switcher.get(argument, "Invalid Selection")

    @staticmethod
    def input_menu_choice():
        """Prompt until the user enters a valid menu choice.

        :param: None
        :return: (String) one of "1".."7"
        """
        while True:
            try:
                choice = str(
                    input("Which option would you like to perform? [1 to 7] - ")
                ).strip()
                if choice not in ["1", "2", "3", "4", "5", "6", "7"]:
                    raise ValueError("Choice not an option, enter 1, 2, 3, 4, 5, 6, 7")
            except ValueError as e:
                print(e)
            else:
                break
        print()  # Add an extra line for looks
        return choice

    @staticmethod
    def input_press_to_continue(optional_message=""):
        """Pause program and show a message before continuing.

        :param optional_message: An optional message you want to display
        :return: nothing
        """
        print(optional_message)
        input("Press the [Enter] key to continue.")

    @staticmethod
    def print_all_employees(dframe):
        """Display all employees in a psql-style table.

        :param dframe: (Pandas DataFrame) a Pandas DataFrame containing all employee info.
        :return: nothing
        """
        IO.print_header()
        print("List of all employees: ")
        IO.print_footer()
        df = dframe.copy()
        df["StartDate"] = pd.to_datetime(df["StartDate"])
        print(tabulate(df, headers="keys", tablefmt="psql", showindex=False))

    @staticmethod
    def get_employee_db():
        """Read EmployeeData.csv into a pandas DataFrame.

        :param: None
        :return df: (Pandas DataFrame) the employee table
        """
        df = pd.read_csv("EmployeeData.csv")
        return df

    @staticmethod
    def print_all_employees_employed(dframe):
        """Display the employees currently employed at the company.

        :param dframe: (Pandas DataFrame) DataFrame containing employee information
        :return: nothing
        """
        # Current employees carry the literal string "NONE" in EndDate;
        # anyone with a real date has left and is filtered out here.
        newdf = dframe[(dframe.EndDate == "NONE")]
        df = newdf.copy()
        df["StartDate"] = pd.to_datetime(df["StartDate"])
        IO.print_header()
        print("List of all employees currently employed: ")
        IO.print_footer()
        print(tabulate(df, headers="keys", tablefmt="psql", showindex=False))

    @staticmethod
    def print_employees_departures(dframe):
        """Display employees that left the company in the past 30 days.

        :param dframe: (Pandas DataFrame) A DataFrame that contains employee information
        :return: nothing
        """
        # Keep only rows with a real EndDate (departed employees), then
        # filter to departures within the last 30 days.
        df = dframe[(dframe.EndDate != "NONE")]
        newdf = df.copy()
        newdf["EndDate"] = pd.to_datetime(newdf["EndDate"])
        date = datetime.datetime.today().replace(microsecond=0)
        df_filter = newdf[newdf.EndDate > date - pd.to_timedelta("30day")]
        IO.print_header()
        print("List of all employees who have left the company in the past 30 days: ")
        IO.print_footer()
        print(tabulate(df_filter, headers="keys", tablefmt="psql", showindex=False))

    @staticmethod
    def print_review_reminders(dframe):
        """Display current employees whose hire anniversary (annual review
        date) falls within the next 90 days.

        :param dframe: (Pandas DataFrame) A DataFrame that contains employee information
        :return: nothing
        """
        df = dframe[(dframe.EndDate == "NONE")]
        newdf = df.copy()
        date = datetime.datetime.today().replace(microsecond=0)
        newdf["StartDate"] = pd.to_datetime(newdf["StartDate"])
        # Project each start date onto the current calendar year to get
        # this year's review date (YYYYMMDD assembled numerically).
        newdf['Month'] = pd.DatetimeIndex(newdf['StartDate']).month
        newdf['Day'] = pd.DatetimeIndex(newdf['StartDate']).day
        newdf['CalendarYear'] = date.year
        newdf['DateForReview'] = pd.to_datetime((newdf.CalendarYear * 10000 + newdf.Month * 100 + newdf.Day).apply(str),
                                                format='%Y%m%d')
        df_filter = newdf[(newdf.DateForReview - pd.to_timedelta("90days") < date) & (newdf.DateForReview >= date)]
        # Drop helper columns and the SSN before printing.
        df_df = df_filter.drop(['Month', 'Day', 'CalendarYear', 'ssn'], axis=1)
        #IO.print_header()
        print()
        # NOTE(review): "Anual" typo in this user-facing string is left
        # unchanged here (runtime text must not be altered in a doc pass).
        print('FRIENDLY REMINDER! Anual Reviews are coming up for the following employees: ')
        #IO.print_footer()
        print(tabulate(df_df, headers="keys", tablefmt="psql", showindex=False))

    @staticmethod
    def input_name_to_delete():
        """Prompt for the full name of the employee record to delete.

        :param: None
        :return strName: (String) the full name entered
        """
        while True:
            try:
                strName = str(input("Enter Full Name: ")).strip()
                if strName.isnumeric():
                    raise ValueError("Name is Numeric. Enter a valid name: ")
                elif strName == "":
                    raise ValueError("Name is empty. Enter a valid Name: ")
            except ValueError as e:
                print(e)
            else:
                break
        return strName

    @staticmethod
    def capture_employee_data(dframe):
        """Capture all fields for a new employee record interactively.

        :param dframe: (Pandas DataFrame) a DataFrame with employee info
        :return: 10-tuple of (employeeID, firstName, lastName, fullName,
            address, ssn, dateOfBirth, jobTitle, startDate, endDate)
        """
        employeeID = Processor.generate_employee_id(dframe)
        firstName = IO.capture_first_name()
        lastName = IO.capture_last_name()
        fullName = firstName + " " + lastName
        address = IO.capture_address()
        ssn = IO.capture_ssn()
        dateOfBirth = IO.capture_date_of_birth()
        jobTitle = IO.capture_job_title()
        startDate = IO.capture_start_date()
        endDate = IO.capture_end_date()
        return (
            employeeID,
            firstName,
            lastName,
            fullName,
            address,
            ssn,
            dateOfBirth,
            jobTitle,
            startDate,
            endDate,
        )

    @staticmethod
    def capture_first_name():
        """Prompt until a non-empty, non-numeric first name is entered.

        :param: None
        :return: (String) the first name
        """
        while True:
            try:
                strText = str(input("Enter First Name: ")).strip()
                if strText.isnumeric():
                    raise ValueError(
                        "First Name is Numeric. Enter a valid First Name: "
                    )
                elif strText == "":
                    raise ValueError("First Name is empty. Enter a valid First Name: ")
            except ValueError as e:
                print(e)
            else:
                break
        return strText

    @staticmethod
    def capture_last_name():
        """Prompt until a non-empty, non-numeric last name is entered.

        :param: None
        :return: (String) the last name
        """
        while True:
            try:
                strText = str(input("Enter Last Name: ")).strip()
                if strText.isnumeric():
                    raise ValueError("Last Name is Numeric. Enter a valid Last name: ")
                elif strText == "":
                    raise ValueError("Last Name is empty. Enter a valid Last Name: ")
            except ValueError as e:
                print(e)
            else:
                break
        return strText

    @staticmethod
    def capture_address():
        """Prompt until a non-empty, non-numeric address is entered.

        :param: None
        :return: (String) the address
        """
        while True:
            try:
                strText = str(input("Enter Address: ")).strip()
                if strText.isnumeric():
                    raise ValueError("Address is Numeric. Enter a valid address: ")
                elif strText == "":
                    raise ValueError("Address is empty. Enter a valid address: ")
            except ValueError as e:
                print(e)
            else:
                break
        return strText

    @staticmethod
    def capture_ssn():
        """Prompt until a correctly formatted SSN is entered.

        :param: None
        :return: (String) the SSN in 000-00-0000 format
        """
        while True:
            try:
                strText = str(input("Enter ssn (000-00-0000): ")).strip()
                if Processor.isValidSSN(strText) == False:
                    raise ValueError(
                        "ssn is not in the proper format. Enter a valid ssn: "
                    )
                # NOTE(review): this branch is unreachable — an empty
                # string already fails isValidSSN above.
                elif strText == "":
                    raise ValueError("ssn is empty. Enter a valid ssn: ")
            except ValueError as e:
                print(e)
            else:
                break
        return strText

    @staticmethod
    def capture_date_of_birth():
        """Prompt until a date of birth parseable as MM/DD/YYYY is entered.

        :param: None
        :return: (String) the date of birth as typed
        """
        formt = "%m/%d/%Y"
        while True:
            try:
                strText = str(
                    input("Enter Date of Birth, MM/DD/YYYY (%m/%d/%Y): ")
                ).strip()
                # strptime is used only for validation; its result is
                # discarded and the raw string is returned.
                res = bool(datetime.datetime.strptime(strText, formt))
            except ValueError as e:
                print(e)
            else:
                break
        return strText

    @staticmethod
    def capture_job_title():
        """Prompt until a non-empty, non-numeric job title is entered.

        :param: None
        :return: (String) the job title
        """
        while True:
            try:
                strText = str(input("Enter Job Title: ")).strip()
                if strText.isnumeric():
                    raise ValueError("Job Title is Numeric. Enter a valid Job Title: ")
                elif strText == "":
                    raise ValueError("Job Title is empty. Enter a valid Job Title: ")
            except ValueError as e:
                print(e)
            else:
                break
        return strText

    @staticmethod
    def capture_start_date():
        """Prompt until a start date parseable as MM/DD/YYYY is entered.

        :param: None
        :return: (String) the start date as typed
        """
        formt = "%m/%d/%Y"
        while True:
            try:
                strText = str(
                    input("Enter Start Date, MM/DD/YYYY (%m/%d/%Y): ")
                ).strip()
                # Validation only; the raw string is what gets stored.
                res = bool(datetime.datetime.strptime(strText, formt))
            except ValueError as e:
                print(e)
            else:
                break
        return strText

    @staticmethod
    def capture_end_date():
        """Prompt for an end date: either MM/DD/YYYY or the literal NONE.

        :param: None
        :return: (String) the end date, upper-cased ("NONE" marks a
            current employee throughout this module)
        """
        formt = "%m/%d/%Y"
        while True:
            strText = (
                str(input("Enter End Date, MM/DD/YYYY (%m/%d/%Y): ")).strip().upper()
            )
            if strText != "NONE":
                try:
                    res = bool(datetime.datetime.strptime(strText, formt))
                except ValueError as e:
                    print(e)
                else:
                    break
            else:
                break
        return strText

    @staticmethod
    def activate_reminders(dframe):
        """Trigger the annual-review reminder report.

        :param dframe: (Pandas DataFrame) DataFrame containing employee information
        :return: nothing
        """
        IO.print_review_reminders(dframe)

    @staticmethod
    def print_header():
        """Print the decorative rule above a report title.

        :param: None
        :return: nothing
        """
        print(
            "+--------------+-------------+------------+--------------"
            "+-----------+-------------+---------------+------------"
            "+---------------------+-----------+"
        )

    @staticmethod
    def print_footer():
        """Print the decorative rule below a report title (same as header).

        :param: None
        :return: nothing
        """
        print(
            "+--------------+-------------+------------+--------------"
            "+-----------+-------------+---------------+------------"
            "+---------------------+-----------+"
        )
# Main Body of Script ------------------------------------------------------ #
if __name__ == "__main__":
    while True:
        # Banner + full menu, then the review reminders, every iteration.
        print(result)
        print("Menu of Options")
        for option_number in range(1, 8):
            print(IO.get_menu(option_number))
        IO.activate_reminders(IO.get_employee_db())
        # Dispatch the user's choice through the case/switch helper.
        choice = IO.input_menu_choice()
        UserSelection().switch(choice)
        IO.input_press_to_continue(strStatus)
| 29.783237
| 120
| 0.539835
| 2,215
| 20,610
| 4.948081
| 0.146275
| 0.036953
| 0.019161
| 0.01104
| 0.452555
| 0.40292
| 0.365876
| 0.327281
| 0.25438
| 0.211953
| 0
| 0.007036
| 0.35182
| 20,610
| 691
| 121
| 29.826339
| 0.813384
| 0.266861
| 0
| 0.473282
| 0
| 0.002545
| 0.153402
| 0.024866
| 0
| 0
| 0
| 0
| 0
| 1
| 0.10687
| false
| 0.015267
| 0.020356
| 0.017812
| 0.201018
| 0.127226
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
5501abe3bf73d0c0ea6df544abcad09c5f1dc8eb
| 8,706
|
py
|
Python
|
src/pipeline/featureranking.py
|
heindorf/wsdmcup17-wdvd-classification
|
7c75447370b0645276e1f918ed1215a3e8a6c62e
|
[
"MIT"
] | 2
|
2018-03-21T13:21:43.000Z
|
2018-06-13T21:58:51.000Z
|
src/pipeline/featureranking.py
|
wsdm-cup-2017/wsdmcup17-wdvd-classification
|
7c75447370b0645276e1f918ed1215a3e8a6c62e
|
[
"MIT"
] | null | null | null |
src/pipeline/featureranking.py
|
wsdm-cup-2017/wsdmcup17-wdvd-classification
|
7c75447370b0645276e1f918ed1215a3e8a6c62e
|
[
"MIT"
] | 2
|
2018-03-21T14:07:32.000Z
|
2020-02-24T10:40:52.000Z
|
# -----------------------------------------------------------------------------
# WSDM Cup 2017 Classification and Evaluation
#
# Copyright (c) 2017 Stefan Heindorf, Martin Potthast, Gregor Engels, Benno Stein
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -----------------------------------------------------------------------------
import itertools
import logging
import pandas as pd
from sklearn import ensemble
from sklearn.externals.joblib import Parallel, delayed
import config
from src import evaluationutils
# Root logger; handler/level configuration happens elsewhere in the app.
_logger = logging.getLogger()
########################################################################
# Feature Ranking
########################################################################
def rank_features(training, validation):
    """Compute and output metrics for single features and feature groups."""
    _logger.info("Ranking features...")
    single_metrics = _compute_metrics_for_single_features(training, validation)
    group_metrics = _compute_metrics_for_feature_groups(training, validation)
    combined = pd.concat([single_metrics, group_metrics], axis=0)
    _output_sorted_by_group(
        validation.get_time_label(),
        validation.get_system_name(),
        combined,
        validation.get_group_names(),
        validation.get_subgroup_names(),
    )
    _logger.info("Ranking features... done.")
def _compute_metrics_for_single_features(training, validation):
    """Return a Pandas data frame with metrics for every single feature."""
    # Each feature name is a tuple; its last element is the actual
    # column name used to select the feature from both datasets.
    tasks = [
        (training.select_feature(feature[-1]),
         validation.select_feature(feature[-1]),
         feature)
        for feature in validation.get_features()
    ]
    runner = Parallel(n_jobs=config.FEATURE_RANKING_N_JOBS,
                      backend='multiprocessing')
    per_feature = runner(
        delayed(_compute_feature_metrics_star)(task) for task in tasks)
    return pd.concat(per_feature, axis=0)
def _compute_metrics_for_feature_groups(training, validation):
    """Return a data frame with metrics for every subgroup and group."""
    tasks = []
    # Subgroup names are tuples; the last element selects the subgroup.
    for subgroup in validation.get_subgroups():
        tasks.append((training.select_subgroup(subgroup[-1]),
                      validation.select_subgroup(subgroup[-1]),
                      subgroup + ('ALL', )))
    # Groups are labeled (group, 'ALL', 'ALL') in the result index.
    for group in validation.get_groups():
        tasks.append((training.select_group(group),
                      validation.select_group(group),
                      (group, 'ALL', 'ALL')))
    runner = Parallel(n_jobs=config.FEATURE_RANKING_N_JOBS,
                      backend='multiprocessing')
    per_task = runner(
        delayed(_compute_feature_metrics_star)(task) for task in tasks)
    return pd.concat(per_task, axis=0)
# Runs inside worker processes spawned by joblib.Parallel.
def _compute_feature_metrics_star(args):
    """Unpack a (training, validation, label) tuple into a plain call."""
    training, validation, label = args
    return _compute_feature_metrics(training, validation, label)
# Runs inside worker processes spawned by joblib.Parallel.
def _compute_feature_metrics(training, validation, label):
    """Fit a random forest on one feature (set) and compute its metrics.

    *label* is a (group, subgroup, feature) tuple used as the row index.
    Returns a one-row frame with validation metrics plus the same
    metrics on the training set (prefixed 'TRAINING').
    """
    _logger.debug("Computing metrics for %s..." % str(label))
    index = pd.MultiIndex.from_tuples(
        [label], names=['Group', 'Subgroup', 'Feature'])
    _logger.debug("Using random forest...")
    clf = ensemble.RandomForestClassifier(random_state=1, verbose=0, n_jobs=-1)
    evaluationutils.fit(clf, training, index)
    y_pred, y_score = evaluationutils.predict(clf, validation, index)
    validation_result = evaluationutils.compute_metrics(
        index, validation.get_metrics_meta(), validation.get_Y(),
        y_score, y_pred)
    # Training-set metrics are kept alongside to expose overfitting.
    train_pred, train_score = evaluationutils.predict(clf, training, index)
    training_result = evaluationutils.compute_metrics_for_mask(
        index,
        evaluationutils.get_content_mask(training.get_metrics_meta(), 'ALL'),
        'ALL', training.get_Y(), train_score, train_pred)
    training_result.columns = [
        ('TRAINING', column) for column in training_result.columns.values]
    return pd.concat([validation_result, training_result], axis=1)
def _output_sorted_by_auc_pr(time_label, system_name, metrics):
    """Output the metrics sorted by area under precision-recall curve."""
    _logger.debug("output_sorted_by_auc_pr...")
    metrics.sort_values([('ALL', 'PR')], ascending=False, inplace=True)
    prefix = config.OUTPUT_PREFIX + "_" + time_label + "_" + system_name
    metrics.to_csv(prefix + "_feature_ranking.csv")
    latex = metrics.loc[:, evaluationutils.COLUMNS]
    latex.to_latex(prefix + "_feature_ranking.tex",
                   float_format='{:.3f}'.format)
    # NOTE(review): slices at most 9 rows although the log says "Top 10".
    n_features = min(9, len(metrics) - 1)
    selection = metrics.iloc[0:n_features] \
        .loc[:, [('ALL', 'Feature'), ('ALL', 'PR')]]
    _logger.info("Top 10 for all content\n" +
                 selection.to_string(float_format='{:.4f}'.format))
    _logger.debug("output_sorted_by_auc_pr... done.")
def _output_sorted_by_group(
        time_label, system_name, metrics, group_names, subgroup_names):
    """Output the metrics sorted by group and by PR-AUC within a group."""
    _logger.debug('_output_sorted_by_group...')
    # Temporary helper columns (underscore-prefixed) drive the sort and
    # are dropped again before writing output.
    sort_columns = ['_Group', '_Subgroup', '_Order', '_Feature']
    ascending_columns = [True, True, False, True]
    metrics['_Group'] = metrics.index.get_level_values('Group')
    metrics['_Subgroup'] = metrics.index.get_level_values('Subgroup')
    metrics['_Feature'] = metrics.index.get_level_values('Feature')
    subgroup_names = ['ALL'] + subgroup_names
    # Define the order of groups and subgroups via ordered categoricals,
    # so sort_values follows the caller-provided ordering, not alphabet.
    metrics['_Group'] = metrics['_Group'].astype('category').cat.set_categories(
        group_names, ordered=True)
    metrics['_Subgroup'] = metrics['_Subgroup'].astype('category').cat.set_categories(
        subgroup_names, ordered=True)
    # Sort the features by AUC_PR and make sure the subgroup summary row
    # (Feature == 'ALL', forced to order 1.0) is always shown before the
    # single features.
    metrics['_Order'] = metrics[('ALL', 'PR')]
    # without this line, the following line causes a PerformanceWarning
    metrics.sort_index(inplace=True)
    metrics.loc[(metrics['_Feature'] == 'ALL'), '_Order'] = 1.0
    metrics.sort_values(by=sort_columns,
                        ascending=ascending_columns, inplace=True)
    metrics = metrics.drop(sort_columns, axis=1)
    metrics.to_csv(config.OUTPUT_PREFIX + "_" + time_label + "_" +
                   system_name + "_feature_groups.csv")
    # Re-index with indented LaTeX labels before emitting the .tex table.
    latex_names = metrics.apply(_compute_latex_name, axis=1)
    metrics.set_index(latex_names, inplace=True)
    metrics = evaluationutils.remove_columns(metrics, evaluationutils.CURVES)
    metrics = evaluationutils.remove_columns(metrics, evaluationutils.STATISTICS)
    evaluationutils.print_metrics_to_latex(
        metrics, config.OUTPUT_PREFIX + "_" + time_label + "_" +
        system_name + "_feature_groups.tex")
    _logger.debug('_output_sorted_by_group... done.')
def _compute_latex_name(row):
group = row.name[0]
subgroup = row.name[1]
feature = row.name[2]
# Is group?
if subgroup == 'ALL' and feature == 'ALL':
result = "\\quad %s" % group
# Is subgroup?
elif feature == 'ALL':
result = "\\quad\quad %s" % subgroup
# Is feature?
else:
result = "\\quad\\quad\\quad %s" % feature
return result
| 39.93578
| 91
| 0.68045
| 1,047
| 8,706
| 5.434575
| 0.25788
| 0.012654
| 0.017223
| 0.020035
| 0.288049
| 0.231986
| 0.201406
| 0.156766
| 0.156766
| 0.136731
| 0
| 0.006333
| 0.183781
| 8,706
| 217
| 92
| 40.119816
| 0.794399
| 0.245693
| 0
| 0.158333
| 0
| 0
| 0.098304
| 0.016332
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0
| 0.058333
| 0.008333
| 0.166667
| 0.008333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
550309304b59f46612eb7c9d7614edf6b323939f
| 25,470
|
py
|
Python
|
libjmp.py
|
RenolY2/obj2bjmp
|
de5ea2acf4493bec4c1b918b38099685fd9b864e
|
[
"MIT"
] | null | null | null |
libjmp.py
|
RenolY2/obj2bjmp
|
de5ea2acf4493bec4c1b918b38099685fd9b864e
|
[
"MIT"
] | null | null | null |
libjmp.py
|
RenolY2/obj2bjmp
|
de5ea2acf4493bec4c1b918b38099685fd9b864e
|
[
"MIT"
] | null | null | null |
from struct import unpack, pack
from math import ceil, inf, acos, degrees
from vectors import Vector3, Triangle, Vector2, Matrix3x3
from re import match
# Canonical basis vectors used when deriving binormal/tangent frames
# for collision triangles (see BJMPTriangle.from_triangle).
UPVECTOR = Vector3(0.0, 1.0, 0.0)
FWVECTOR = Vector3(1.0, 0.0, 0.0)
SIDEVECTOR = Vector3(0.0, 0.0, 1.0)
def round_vector(vector, digits):
    """Round the x, y and z components of *vector* in place."""
    for axis in ("x", "y", "z"):
        setattr(vector, axis, round(getattr(vector, axis), digits))
def read_vertex(v_data):
    """Parse one OBJ face token ("v", "v/vt" or "v/vt/vn").

    Returns only the vertex index; a present normal index is parsed
    (so malformed tokens still raise ValueError) but discarded.
    """
    fields = v_data.split("/")
    if len(fields) == 3:
        int(fields[2])  # validate the normal index, value unused
    return int(fields[0])
def read_uint32(f):
    """Read a big-endian unsigned 32-bit integer from stream *f*."""
    (value,) = unpack(">I", f.read(0x4))
    return value
def read_float_tripple(f):
    """Read three big-endian 32-bit floats from *f*; returns a 3-tuple."""
    return unpack(">fff", f.read(0xC))
def read_vector3(f):
    """Read three big-endian floats from *f* and wrap them in a Vector3."""
    x, y, z = unpack(">fff", f.read(0xC))
    return Vector3(x, y, z)
def read_float(f):
    """Read one big-endian 32-bit float from stream *f*."""
    (value,) = unpack(">f", f.read(0x4))
    return value
def read_uint16(f):
    """Read a big-endian unsigned 16-bit integer from stream *f*."""
    (value,) = unpack(">H", f.read(2))
    return value
def write_uint32(f, val):
    """Write *val* to *f* as a big-endian unsigned 32-bit integer."""
    encoded = pack(">I", val)
    f.write(encoded)
def write_uint16(f, val):
    """Write *val* to *f* as a big-endian unsigned 16-bit integer."""
    encoded = pack(">H", val)
    f.write(encoded)
def write_vector3(f, vector):
    """Write vector.x, .y, .z to *f* as three big-endian 32-bit floats."""
    encoded = pack(">fff", vector.x, vector.y, vector.z)
    f.write(encoded)
def write_float(f, val):
    """Write *val* to *f* as one big-endian 32-bit float."""
    encoded = pack(">f", val)
    f.write(encoded)
def read_obj(objfile):
    """Parse a Wavefront OBJ from the iterable of lines *objfile*.

    Returns a 4-tuple:
      - vertices: list of (x, y, z) float tuples
      - faces: list of (v1, v2, v3, floor_type) with 1-based vertex
        indices; quads are split into two triangles
      - normals: list of (nx, ny, nz) float tuples from "vn" lines
      - (smallest_x, smallest_z, biggest_x, biggest_z) XZ bounds of all
        vertices, or four Nones when no vertices were read

    floor_type is taken from the most recent "usemtl" line whose material
    name embeds a 0xNNNN hex code; it applies to subsequent faces.
    """
    vertices = []
    faces = []
    face_normals = []  # NOTE(review): collected nowhere; kept for fidelity
    normals = []
    floor_type = None
    smallest_x = smallest_z = biggest_x = biggest_z = None
    for line in objfile:
        line = line.strip()
        args = line.split(" ")
        # NOTE(review): len(args) == 0 can never be true (split always
        # yields at least one element); the "#" check does the filtering.
        if len(args) == 0 or line.startswith("#"):
            continue
        cmd = args[0]
        if cmd == "v":
            # print(args)
            # Collapse runs of spaces by removing empty split results.
            for i in range(args.count("")):
                args.remove("")
            x, y, z = map(float, args[1:4])
            vertices.append((x, y, z))
            if smallest_x is None:
                # Initialize the XZ bounds from the first vertex.
                smallest_x = biggest_x = x
                smallest_z = biggest_z = z
            else:
                if x < smallest_x:
                    smallest_x = x
                elif x > biggest_x:
                    biggest_x = x
                if z < smallest_z:
                    smallest_z = z
                elif z > biggest_z:
                    biggest_z = z
        elif cmd == "f":
            # Triangles pass through; quads are triangulated into two
            # triangles sharing the v1-v3 diagonal. Anything else fails.
            if len(args) == 5:
                v1, v2, v3, v4 = map(read_vertex, args[1:5])
                faces.append((v1, v2, v3, floor_type))
                faces.append((v3, v4, v1, floor_type))
            elif len(args) == 4:
                v1, v2, v3 = map(read_vertex, args[1:4])
                faces.append((v1, v2, v3, floor_type))
            else:
                raise RuntimeError("Model needs to be triangulated! Only faces with 3 or 4 vertices are supported.")
        elif cmd == "vn":
            nx, ny, nz = map(float, args[1:4])
            normals.append((nx, ny, nz))
        elif cmd == "usemtl":
            assert len(args) >= 2
            matname = " ".join(args[1:])
            # A material name like "Road_0x0103_x" selects collision
            # floor type 0x0103 for the following faces.
            floor_type_match = match("^(.*?)(0x[0-9a-fA-F]{4})(.*?)$", matname)
            if floor_type_match is not None:
                floor_type = int(floor_type_match.group(2), 16)
            else:
                floor_type = None
    return vertices, faces, normals, (smallest_x, smallest_z, biggest_x, biggest_z)
class BoundaryBox(object):
    """Axis-aligned bounding box defined by two corner vectors."""

    def __init__(self):
        # Corners/midpoint are populated by the factory classmethods.
        self.start = None
        self.end = None
        self.mid = None

    @classmethod
    def from_vector(cls, start, end):
        """Build a box from two corner vectors (both are copied)."""
        box = cls()
        box.start = start.copy()
        box.end = end.copy()
        box.mid = (box.start + box.end) / 2.0
        return box

    @classmethod
    def from_file(cls, f):
        """Read start and end corners as float triples; mid stays None."""
        box = cls()
        box.start = Vector3(*read_float_tripple(f))
        box.end = Vector3(*read_float_tripple(f))
        return box

    def write(self, f):
        """Serialize both corners as big-endian float triples."""
        write_vector3(f, self.start)
        write_vector3(f, self.end)

    def size(self):
        """Return the absolute extent of the box along each axis."""
        extent = self.end - self.start
        extent.x = abs(extent.x)
        extent.y = abs(extent.y)
        extent.z = abs(extent.z)
        return extent

    def scale(self, x, y, z):
        """Scale the box about its center by per-axis factors."""
        center = (self.start + self.end) / 2.0
        lo = self.start - center
        hi = self.end - center
        lo.x *= x
        lo.y *= y
        lo.z *= z
        hi.x *= x
        hi.y *= y
        hi.z *= z
        self.start = lo + center
        self.end = hi + center

    def contains(self, triangle):
        """True when the triangle's XZ bounding rectangle overlaps the box.

        Only x/z are tested; the y axis is ignored entirely.
        """
        corners = (triangle.origin, triangle.p2, triangle.p3)
        xs = [corner.x for corner in corners]
        zs = [corner.z for corner in corners]
        if max(xs) < self.start.x or min(xs) > self.end.x:
            return False
        if max(zs) < self.start.z or min(zs) > self.end.z:
            return False
        return True
class BJMPTriangle(object):
    def __init__(self):
        """Initialize all collision-triangle fields to empty defaults.

        Real values are filled in by from_triangle() / from_file().
        """
        # Vertex indices into the shared vertex list (set by fill_vertices).
        self._p1_index = None
        self._p2_index = None
        self._p3_index = None
        self.triangle = None   # source Triangle object
        self.data = None
        self.normal = None     # face normal
        self.d = 0             # plane distance: normal . origin
        self.binormal = None   # frame vectors derived in from_triangle
        self.tangent = None
        # Per-edge data: projected point, edge normal and its plane distance.
        self.p1 = None
        self.edge_normal1 = None
        self.edge_normal1_d = 0
        self.p2 = None
        self.edge_normal2 = None
        self.edge_normal2_d = 0
        self.p3 = None
        self.edge_normal3 = None
        self.edge_normal3_d = 0
        # Collision flags; 0x100 is the default used for floor triangles.
        self.coll_data = 0x100
def is_wall(self, normal):
return degrees(acos(normal.cos_angle(Vector3(0.0, 1.0, 0.0)))) > 45
    @classmethod
    def from_triangle(cls, triangle, coll_data=None):
        """Build a BJMPTriangle from a Triangle, deriving its NBT frame,
        edge normals and projected 2D points.

        :param triangle: Triangle with origin/p2/p3 and a normal; the
            normal is negated IN PLACE and rounded to 6 decimals.
        :param coll_data: optional collision flags; when None, defaults
            to 0x0100 for floors and 0x810 for walls.
        :return: populated BJMPTriangle
        """
        tri = cls()
        tri.triangle = triangle
        # NOTE(review): mutates the caller's triangle (normal flip + round).
        triangle.normal *= -1
        round_vector(triangle.normal, 6)
        tri.coll_data = coll_data
        tri.normal = triangle.normal
        if not tri.is_wall(tri.normal):
            # Floor-like triangle: seed the binormal from the forward axis.
            tri.binormal = triangle.normal.cross(FWVECTOR) #*-1
            flip = True
            if tri.binormal.norm() == 0:
                #tri.binormal = triangle.normal.cross(UPVECTOR) *-1
                #flip = True
                # Degenerate cross product (normal parallel to FWVECTOR).
                tri.binormal = UPVECTOR.copy()
            tri.binormal.normalize()
            tri.tangent = Vector3(0.0, 0.0, 0.0)
            tri.tangent = triangle.normal.cross(tri.binormal)#*-1
            tri.tangent.normalize()
            if coll_data is None:
                tri.coll_data = 0x0100
        else:
            # Wall-like triangle: seed from the up axis and flip the frame.
            tri.binormal = triangle.normal.cross(UPVECTOR) #*-1
            flip = True
            if tri.binormal.norm() == 0:
                #tri.binormal = triangle.normal.cross(UPVECTOR) *-1
                #flip = True
                tri.binormal = FWVECTOR.copy()
            tri.binormal.normalize()
            tri.tangent = Vector3(0.0, 0.0, 0.0)
            tri.tangent = triangle.normal.cross(tri.binormal)#*-1
            tri.tangent.normalize()
            tri.binormal *= -1
            tri.tangent *= -1
            if coll_data is None:
                tri.coll_data = 0x810
        #if flip:
        #    tmp = tri.tangent
        #    tri.tangent = tri.binormal*-1
        #    tri.binormal = tmp#*-1
        # Plane distance of the triangle's supporting plane.
        tri.d = tri.normal.dot(triangle.origin)
        p1, p2, p3 = triangle.origin, triangle.p2, triangle.p3
        #tri.p1 = Vector3(-p1.z, 0, -p1.x)
        # Outward edge normals (edge direction x face normal) with their
        # plane distances, one per triangle edge.
        tri.edge_normal1 = (p2-p1).cross(tri.normal)
        tri.edge_normal1.normalize()
        #tri.p2 = Vector3(-p2.z, 0, -p2.x)
        tri.edge_normal2 = (p3-p2).cross(tri.normal)
        tri.edge_normal2.normalize()
        tri.edge_normal2_d = tri.edge_normal2.dot(p2)
        #tri.p3 = Vector3(-p3.z, 0, -p3.x)
        tri.edge_normal3 = (p1-p3).cross(tri.normal)
        tri.edge_normal3.normalize()
        tri.edge_normal3_d = tri.edge_normal3.dot(p3)
        tri.edge_normal1_d = tri.edge_normal1.dot(p1)
        # Basis matrix with rows (binormal, normal, tangent): projects
        # plane-relative points into the triangle's local frame.
        nbt = Matrix3x3(
            tri.binormal.x, tri.binormal.y, tri.binormal.z,
            tri.normal.x, tri.normal.y, tri.normal.z,
            tri.tangent.x, tri.tangent.y, tri.tangent.z
        )
        # Remove the plane offset, then transform into the local frame.
        p1 = p1 - tri.normal*tri.d
        p2 = p2 - tri.normal*tri.d
        p3 = p3 - tri.normal*tri.d
        p1 = nbt.multiply_vec3(p1)
        p2 = nbt.multiply_vec3(p2)
        p3 = nbt.multiply_vec3(p3)
        tri.p1 = p1
        tri.p2 = p2
        tri.p3 = p3
        """nbt = Matrix3x3(tri.normal.x, tri.normal.y, tri.normal.z,
                        tri.tangent.x, tri.tangent.y, tri.tangent.z,
                        tri.binormal.x, tri.binormal.y, tri.binormal.z)
        nbt.transpose()
        tri.p1 = Vector3(*nbt.multiply_vec3(p1.x, p1.y, p1.z))
        tri.p2 = Vector3(*nbt.multiply_vec3(p2.x, p2.y, p2.z))
        tri.p3 = Vector3(*nbt.multiply_vec3(p3.x, p3.y, p3.z))"""
        return tri
@classmethod
def from_file(cls, f, vertices):
tri = cls()
start = f.tell()
v1, v2, v3 = read_uint16(f), read_uint16(f), read_uint16(f)
tri.triangle = Triangle(vertices[v1], vertices[v2], vertices[v3])
tri.normal = Vector3(*read_float_tripple(f))
tri.d = read_float(f)
tri.binormal = Vector3(*read_float_tripple(f))
tri.tangent = Vector3(*read_float_tripple(f))
tri.p1 = Vector3(read_float(f), 0, read_float(f))
tri.edge_normal1 = Vector3(*read_float_tripple(f))
tri.edge_normal1_d = read_float(f)
tri.p2 = Vector3(read_float(f), 0, read_float(f))
tri.edge_normal2 = Vector3(*read_float_tripple(f))
tri.edge_normal2_d = read_float(f)
tri.p3 = Vector3(read_float(f), 0, read_float(f))
tri.edge_normal3 = Vector3(*read_float_tripple(f))
tri.edge_normal3_d = read_float(f)
tri.coll_data = read_uint16(f)
assert f.tell() - start == 0x78
return tri
def fill_vertices(self, vertices: list):
try:
v1_index = vertices.index(self.triangle.origin)
except ValueError:
v1_index = len(vertices)
vertices.append(self.triangle.origin)
try:
v2_index = vertices.index(self.triangle.p2)
except ValueError:
v2_index = len(vertices)
vertices.append(self.triangle.p2)
try:
v3_index = vertices.index(self.triangle.p3)
except ValueError:
v3_index = len(vertices)
vertices.append(self.triangle.p3)
self._p1_index = v1_index
self._p2_index = v2_index
self._p3_index = v3_index
def write(self, f):
write_uint16(f, self._p1_index)
write_uint16(f, self._p2_index)
write_uint16(f, self._p3_index)
write_vector3(f, self.normal)
write_float(f, self.d)
write_vector3(f, self.binormal)
write_vector3(f, self.tangent)
write_float(f, self.p1.x)
write_float(f, self.p1.z)
write_vector3(f, self.edge_normal1)
write_float(f, self.edge_normal1_d)
write_float(f, self.p2.x)
write_float(f, self.p2.z)
write_vector3(f, self.edge_normal2)
write_float(f, self.edge_normal2_d)
write_float(f, self.p3.x)
write_float(f, self.p3.z)
write_vector3(f, self.edge_normal3)
write_float(f, self.edge_normal3_d)
write_uint16(f, self.coll_data)
class Group(object):
    """A cell of the broad-phase collision grid: a bounding box plus the
    triangle indices it contains.  On disk, the count and the offset into
    the shared index list are packed into one uint32 (count:8 | offset:24)."""
    def __init__(self):
        self._tri_count = 0
        self._offset = 0
        self.bbox = None
        self.tri_indices = []
    @classmethod
    def from_file(cls, f):
        """Read the packed count/offset word and the bounding box."""
        group = cls()
        packed = read_uint32(f)
        group._tri_count = (packed >> 24) & 0xFF
        group._offset = packed & 0xFFFFFF
        group.bbox = BoundaryBox.from_file(f)
        return group
    def read_indices(self, indices):
        """Resolve this group's slice of the shared index list."""
        for pos in range(self._tri_count):
            self.tri_indices.append(indices[pos + self._offset])
    def add_indices(self, indices):
        """Append this group's indices to the shared list, recording where
        they start so write() can pack the offset."""
        self._offset = len(indices)
        self._tri_count = len(self.tri_indices)
        indices.extend(self.tri_indices)
    def write(self, f):
        """Write the packed count/offset word and the bounding box."""
        assert self._tri_count <= 0xFF
        assert self._offset <= 0xFFFFFF
        write_uint32(f, self._tri_count << 24 | self._offset)
        self.bbox.write(f)
class CollisionGroups(object):
    """The broad-phase grid section of a BJMP file: an overall bounding box,
    grid dimensions, per-cell sizes (and their inverses), and one Group per
    cell sharing a single flat triangle-index list on disk."""
    def __init__(self):
        self.bbox = None
        self.grid_x = 0
        self.grid_y = 0
        self.grid_z = 0
        self.cell_dimensions = None  # size of one grid cell
        self.cell_inverse = None     # 1/size, presumably for fast cell lookup
        self.groups = []
        #self.indices = []
    @classmethod
    def from_model(cls, model):
        # Not implemented; grid construction currently lives in BJMP.from_obj.
        pass
    @classmethod
    def from_file(cls, f):
        """Read header, groups, and the shared index list, then resolve each
        group's slice of the list into its tri_indices."""
        colgroups = cls()
        colgroups.bbox = BoundaryBox.from_file(f)
        colgroups.grid_x = read_uint32(f)
        colgroups.grid_y = read_uint32(f)
        colgroups.grid_z = read_uint32(f)
        colgroups.cell_dimensions = read_vector3(f)
        colgroups.cell_inverse = read_vector3(f)
        group_count = read_uint32(f)
        colgroups.groups = []
        for i in range(group_count):
            colgroups.groups.append(Group.from_file(f))
        indices = []
        index_count = read_uint32(f)
        for i in range(index_count):
            indices.append(read_uint16(f))
        for group in colgroups.groups:
            group.read_indices(indices)
        return colgroups
    def write(self, f):
        """Write header, groups, and the shared index list."""
        self.bbox.write(f)
        write_uint32(f, self.grid_x)
        write_uint32(f, self.grid_y)
        write_uint32(f, self.grid_z)
        write_vector3(f, self.cell_dimensions)
        write_vector3(f, self.cell_inverse)
        write_uint32(f, len(self.groups))
        # add_indices appends each group's tri_indices to the shared list and
        # records the group's offset, so `indices` is complete after this loop.
        # (Previously the identical list was redundantly rebuilt a second time.)
        indices = []
        for group in self.groups:
            group.add_indices(indices)
            group.write(f)
        write_uint32(f, len(indices))
        for index in indices:
            write_uint16(f, index)
class BJMP(object):
    """Pikmin 2 BJMP collision file: two bounding boxes, a list of
    BJMPTriangle, and a uniform XZ grid of triangle groups for broad-phase
    collision lookups."""
    def __init__(self):
        self.bbox_inner = None
        self.bbox_outer = None
        self.triangles = []
        self.collision_groups = CollisionGroups()
    @classmethod
    def from_obj(cls, f):
        """Build a BJMP from a Wavefront OBJ text file object.

        Faces may have 3 or 4 vertices (quads are split into two triangles);
        degenerate zero-area triangles are dropped.  A `usemtl` name that
        embeds a 0xNNNN hex token sets the collision type for the faces that
        follow it.
        """
        vertices = []
        bjmp = cls()
        collision_type = None
        smallest_x = smallest_y = smallest_z = biggest_x = biggest_y = biggest_z = None
        for line in f:
            line = line.strip()
            # Skip blank lines and comments.
            if not line or line.startswith("#"):
                continue
            args = line.split(" ")
            cmd = args[0]
            if cmd == "v":
                # Drop empty tokens produced by repeated spaces.
                for i in range(args.count("")):
                    args.remove("")
                x, y, z = map(float, args[1:4])
                vertices.append(Vector3(x, y, z))
                if smallest_x is None:
                    # Initialize the extents from the first vertex.
                    smallest_x = biggest_x = x
                    smallest_y = biggest_y = y
                    smallest_z = biggest_z = z
                else:
                    if x < smallest_x:
                        smallest_x = x
                    elif x > biggest_x:
                        biggest_x = x
                    if y < smallest_y:
                        smallest_y = y
                    elif y > biggest_y:
                        biggest_y = y
                    if z < smallest_z:
                        smallest_z = z
                    elif z > biggest_z:
                        biggest_z = z
            elif cmd == "f":
                if len(args) == 5:
                    # Quad: split into two triangles sharing the v1-v3 diagonal.
                    v1, v2, v3, v4 = map(read_vertex, args[1:5])
                    tri1 = Triangle(vertices[v1 - 1], vertices[v3 - 1], vertices[v2 - 1])
                    tri2 = Triangle(vertices[v3 - 1], vertices[v1 - 1], vertices[v4 - 1])
                    if tri1.normal.norm() != 0:
                        bjmp.triangles.append(BJMPTriangle.from_triangle(tri1, collision_type))
                    if tri2.normal.norm() != 0:
                        bjmp.triangles.append(BJMPTriangle.from_triangle(tri2, collision_type))
                elif len(args) == 4:
                    v1, v3, v2 = map(read_vertex, args[1:4])
                    tri1 = Triangle(vertices[v1 - 1], vertices[v2 - 1], vertices[v3 - 1])
                    if tri1.normal.norm() != 0:
                        bjmp.triangles.append(BJMPTriangle.from_triangle(tri1, collision_type))
                else:
                    raise RuntimeError(
                        "Model needs to be triangulated! Only faces with 3 or 4 vertices are supported.")
            elif cmd == "usemtl":
                assert len(args) >= 2
                matname = " ".join(args[1:])
                # Material names can embed the collision type as a 0xNNNN hex token.
                floor_type_match = match("^(.*?)(0x[0-9a-fA-F]{4})(.*?)$", matname)
                if floor_type_match is not None:
                    collision_type = int(floor_type_match.group(2), 16)
                else:
                    collision_type = None
        bjmp.bbox_inner = BoundaryBox.from_vector(
            Vector3(smallest_x, smallest_y, smallest_z),
            Vector3(biggest_x, biggest_y, biggest_z)
        )
        bjmp.bbox_outer = BoundaryBox.from_vector(
            bjmp.bbox_inner.start,
            bjmp.bbox_inner.end
        )
        # Broad-phase grid: fixed 150x150 cells on the XZ plane, one layer in Y.
        cell_x = 150.0
        cell_z = 150.0
        bjmp.collision_groups.bbox = bjmp.bbox_inner
        bjmp.collision_groups.cell_dimensions = Vector3(cell_x, biggest_y - smallest_y, cell_z)
        bjmp.collision_groups.cell_inverse = Vector3(
            1.0 / bjmp.collision_groups.cell_dimensions.x,
            1.0 / bjmp.collision_groups.cell_dimensions.y,
            1.0 / bjmp.collision_groups.cell_dimensions.z)
        x_max = int(ceil((biggest_x - smallest_x) / cell_x))
        z_max = int(ceil((biggest_z - smallest_z) / cell_z))
        start_x = bjmp.bbox_inner.start.x
        start_z = bjmp.bbox_inner.start.z
        bjmp.collision_groups.grid_x = x_max
        bjmp.collision_groups.grid_y = 1
        bjmp.collision_groups.grid_z = z_max
        for ix in range(x_max):
            print(ix, "/", x_max)
            for iz in range(z_max):
                bbox_x = start_x + ix * cell_x
                bbox_z = start_z + iz * cell_z
                bbox = BoundaryBox.from_vector(
                    Vector3(bbox_x, smallest_y, bbox_z),
                    Vector3(bbox_x + cell_x, biggest_y, bbox_z + cell_z)
                )
                group = Group()
                group.bbox = bbox
                min_y = inf
                max_y = -inf
                for i, triangle in enumerate(bjmp.triangles):
                    if bbox.contains(triangle.triangle):
                        tri = triangle.triangle
                        # Track the vertical extent of the geometry in this cell.
                        if tri.origin.y < min_y:
                            min_y = tri.origin.y
                        if tri.p2.y < min_y:
                            min_y = tri.p2.y
                        if tri.p3.y < min_y:
                            min_y = tri.p3.y
                        if tri.origin.y > max_y:
                            max_y = tri.origin.y
                        if tri.p2.y > max_y:
                            max_y = tri.p2.y
                        if tri.p3.y > max_y:
                            max_y = tri.p3.y
                        group.tri_indices.append(i)
                # Only widen the cell's Y range if the geometry exceeds it.
                if min_y < bbox.start.y:
                    bbox.start.y = min_y
                # BUGFIX: previously compared `max_y > bbox.start.y` while
                # assigning bbox.end.y, asymmetric with the min_y branch above;
                # that shrank the cell top to the local geometry maximum.
                if max_y > bbox.end.y:
                    bbox.end.y = max_y
                # Pad the vertical range a little.
                bbox.start.y -= 5.0
                bbox.end.y += 5.0
                bjmp.collision_groups.groups.append(group)
        return bjmp
    @classmethod
    def from_file(cls, f):
        """Read a binary BJMP file; raises RuntimeError on an unknown magic."""
        bjmp = cls()
        magic = read_uint32(f)
        if magic == 0x013304E6:
            #self.simple = False
            bjmp.bbox_inner = BoundaryBox.from_file(f)
            bjmp.bbox_outer = BoundaryBox.from_file(f)
        #elif magic == 0x01330237:
        #    self.simple = True
        #    self.bbox = BoundaryBox()
        else:
            raise RuntimeError("Unknown/Unsupported magic: {:x}".format(magic))
        vertex_count = read_uint16(f)
        vertices = []
        for i in range(vertex_count):
            vertices.append(read_vector3(f))
        bjmp.triangles = []
        tri_count = read_uint32(f)
        for i in range(tri_count):
            bjmp.triangles.append(BJMPTriangle.from_file(f, vertices))
        print("Remaining data starts at {0:x}".format(f.tell()))
        bjmp.collision_groups = CollisionGroups.from_file(f)
        # The collision groups must consume the rest of the file.
        assert f.read() == b""
        print("sizes")
        print("x z size:", bjmp.collision_groups.grid_x, bjmp.collision_groups.grid_z)
        print(bjmp.collision_groups.bbox.size())
        print(bjmp.collision_groups.cell_dimensions)
        return bjmp
    def write(self, f):
        """Write the binary BJMP: magic, boxes, deduplicated vertex pool,
        triangles, then the collision grid."""
        write_uint32(f, 0x013304E6)
        self.bbox_inner.write(f)
        self.bbox_outer.write(f)
        vertices = []
        for triangle in self.triangles:
            triangle.fill_vertices(vertices)
        write_uint16(f, len(vertices))
        for vertex in vertices:
            write_vector3(f, vertex)
        write_uint32(f, len(self.triangles))
        for triangle in self.triangles:
            triangle.write(f)
        self.collision_groups.write(f)
if __name__ == "__main__":
    # CLI: convert .obj -> .bjmp or .bjmp -> .obj based on the input extension.
    import sys
    in_name = sys.argv[1]
    if in_name.endswith(".obj"):
        out_name = in_name + ".bjmp"
        with open(in_name, "r") as f:
            bjmp = BJMP.from_obj(f)
        with open(out_name, "wb") as f:
            bjmp.write(f)
    elif in_name.endswith(".bjmp"):
        out_name = in_name + ".obj"
        with open(in_name, "rb") as f:
            bjmp = BJMP.from_file(f)
        with open(out_name, "w") as f:
            f.write("# .OBJ generated from Pikmin 2 by Yoshi2's obj2grid.py\n\n")
            f.write("# VERTICES BELOW\n\n")
            # Each triangle writes its own 3 vertices (no deduplication needed
            # for a viewer), so face indices are simply consecutive.
            vertex_counter = 0
            faces = []
            for btriangle in bjmp.triangles:
                tri = btriangle.triangle
                p1, p2, p3 = tri.origin, tri.p2, tri.p3
                f.write("v {} {} {}\n".format(p1.x, p1.y, p1.z))
                f.write("v {} {} {}\n".format(p2.x, p2.y, p2.z))
                f.write("v {} {} {}\n".format(p3.x, p3.y, p3.z))
                faces.append((vertex_counter+1, vertex_counter+2, vertex_counter+3, btriangle.coll_data))
                vertex_counter += 3
            last_coll = None
            for i1, i2, i3, coll in faces:
                if coll != last_coll:
                    f.write("usemtl collision_type0x{:04X}\n".format(coll))
                    # BUGFIX: remember the material just emitted; previously
                    # last_coll was never updated, so a usemtl line was
                    # written before every single face.
                    last_coll = coll
                f.write("f {0} {2} {1}\n".format(i1, i2, i3))
    print("done")
| 32.322335
| 117
| 0.517314
| 3,258
| 25,470
| 3.88981
| 0.081952
| 0.012231
| 0.003551
| 0.013414
| 0.485994
| 0.375286
| 0.313817
| 0.271601
| 0.249112
| 0.223546
| 0
| 0.042829
| 0.363801
| 25,470
| 788
| 118
| 32.322335
| 0.739262
| 0.083863
| 0
| 0.280142
| 0
| 0
| 0.023467
| 0.003671
| 0
| 0
| 0.003321
| 0
| 0.010638
| 1
| 0.067376
| false
| 0.001773
| 0.008865
| 0.003546
| 0.120567
| 0.012411
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
5504298a3a2d8c197f31284679e78d49ef6eed72
| 585
|
py
|
Python
|
kattis/integerlists.py
|
div5252/competitive-programming
|
111902dff75e79e65213c95055ffb0bb15b76e94
|
[
"WTFPL"
] | 506
|
2018-08-22T10:30:38.000Z
|
2022-03-31T10:01:49.000Z
|
kattis/integerlists.py
|
diegordzr/competitive-programming
|
1443fb4bd1c92c2acff64ba2828abb21b067e6e0
|
[
"WTFPL"
] | 13
|
2019-08-07T18:31:18.000Z
|
2020-12-15T21:54:41.000Z
|
kattis/integerlists.py
|
diegordzr/competitive-programming
|
1443fb4bd1c92c2acff64ba2828abb21b067e6e0
|
[
"WTFPL"
] | 234
|
2018-08-06T17:11:41.000Z
|
2022-03-26T10:56:42.000Z
|
#!/usr/bin/env python3
# https://open.kattis.com/problems/integerlists
# Simulate a program of 'R' (reverse) and 'D' (drop first) operations on a
# list without materializing any reversal: keep two cursors (i, j) into the
# original list plus a direction flag.
for _ in range(int(input())):
    p = input()  # program: a string of 'R' and 'D' commands
    n = int(input())  # number of list elements
    i, j = 0, n  # the live window is xs[i:j]
    xs = input()[1:-1].split(',')  # strip surrounding '[' and ']'
    front = True  # True while the window reads forward
    for c in p:
        if c == 'R':
            front = not front  # reverse: just flip the direction
        elif i == j:
            # 'D' on an empty window: overrun the window to mark the error.
            i += 1
            break
        elif front:
            i += 1  # drop from the logical front (physical front)
        else:
            j -= 1  # reversed: the logical front is the physical back
    if i > j:
        print('error')
    else:
        if front:
            print('[' + ','.join(xs[i:j]) + ']')
        else:
            print('[' + ','.join(xs[i:j][::-1]) + ']')
| 22.5
| 54
| 0.384615
| 74
| 585
| 3.027027
| 0.472973
| 0.044643
| 0.098214
| 0.107143
| 0.116071
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023529
| 0.418803
| 585
| 25
| 55
| 23.4
| 0.635294
| 0.11453
| 0
| 0.217391
| 0
| 0
| 0.025194
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.130435
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
55046b3036b22157a72d92e77888dc355a149d40
| 3,177
|
py
|
Python
|
main/tests/test_middleware.py
|
uktrade/return-to-office
|
d4c53c734611413c9f8a7624e52dc35910c5ff57
|
[
"MIT"
] | 1
|
2020-10-25T18:16:47.000Z
|
2020-10-25T18:16:47.000Z
|
main/tests/test_middleware.py
|
uktrade/return-to-office
|
d4c53c734611413c9f8a7624e52dc35910c5ff57
|
[
"MIT"
] | 1
|
2020-10-27T07:11:26.000Z
|
2020-10-27T07:11:26.000Z
|
main/tests/test_middleware.py
|
uktrade/return-to-office
|
d4c53c734611413c9f8a7624e52dc35910c5ff57
|
[
"MIT"
] | null | null | null |
import pytest
from django.http import HttpResponse
from django.urls import reverse
from main.middleware import IpRestrictionMiddleware
def dummy_view(_):
    """Stand-in downstream view for middleware tests: always responds 200 OK."""
    return HttpResponse(status=200)
class TestIpRestrictionMiddleware:
    """Tests for IpRestrictionMiddleware's IP allow-listing behaviour."""
    def test_middleware_is_enabled(self, client, settings):
        """With restriction on, an unlisted client gets 401 on a restricted app."""
        settings.IP_RESTRICT = True
        settings.IP_RESTRICT_APPS = ["admin"]
        settings.IP_SAFELIST_XFF_INDEX = -2
        assert client.get(reverse("admin:index")).status_code == 401
    def test_applies_to_specified_apps_only(self, rf, settings):
        """Only apps listed in `settings.IP_RESTRICT_APPS` should be ip restricted"""
        settings.IP_RESTRICT = True
        settings.IP_RESTRICT_APPS = ["admin"]
        settings.IP_SAFELIST_XFF_INDEX = -2
        # "/" is not under a restricted app, so the request passes through.
        request = rf.get("/")
        assert IpRestrictionMiddleware(dummy_view)(request).status_code == 200
    def test_not_enabled_ifip_restrict_is_false(self, rf, settings):
        """With IP_RESTRICT off, even restricted apps are reachable."""
        settings.IP_RESTRICT = False
        settings.IP_RESTRICT_APPS = ["admin"]
        settings.IP_SAFELIST_XFF_INDEX = -2
        request = rf.get(reverse("admin:index"), HTTP_X_FORWARDED_FOR="")
        assert IpRestrictionMiddleware(dummy_view)(request).status_code == 200
    @pytest.mark.parametrize(
        "xff_header,expected_status",
        (
            ["1.1.1.1, 2.2.2.2, 3.3.3.3", 200],
            ["1.1.1.1", 401],
            [
                "",
                401,
            ],
        ),
    )
    def test_x_forwarded_header(self, rf, settings, xff_header, expected_status):
        """The client IP is taken from X-Forwarded-For at IP_SAFELIST_XFF_INDEX;
        headers too short to index (or empty) are rejected."""
        settings.IP_RESTRICT = True
        settings.IP_RESTRICT_APPS = ["admin"]
        settings.ALLOWED_IPS = ["2.2.2.2"]
        settings.IP_SAFELIST_XFF_INDEX = -2
        request = rf.get(reverse("admin:index"), HTTP_X_FORWARDED_FOR=xff_header)
        assert IpRestrictionMiddleware(dummy_view)(request).status_code == expected_status
    @pytest.mark.parametrize(
        "allowed_ips,expected_status", ([["2.2.2.2"], 200], [["1.1.1.1"], 401])
    )
    def test_ips(self, rf, settings, allowed_ips, expected_status):
        """Access depends on the extracted IP being in ALLOWED_IPS."""
        settings.IP_RESTRICT = True
        settings.IP_RESTRICT_APPS = ["admin"]
        settings.ALLOWED_IPS = allowed_ips
        settings.IP_SAFELIST_XFF_INDEX = -2
        request = rf.get(reverse("admin:index"), HTTP_X_FORWARDED_FOR="1.1.1.1, 2.2.2.2, 3.3.3.3")
        assert IpRestrictionMiddleware(dummy_view)(request).status_code == expected_status
        # Index -2 selects "2.2.2.2", which is never in this list -> always 401.
        settings.ALLOWED_IPS = ["3.3.3.3"]
        assert IpRestrictionMiddleware(dummy_view)(request).status_code == 401
    @pytest.mark.parametrize(
        "allowed_ips,expected_status", ([["2.2.2.2"], 200], [["1.1.1.1"], 401])
    )
    def test_ip_restricted_path(self, rf, settings, allowed_ips, expected_status):
        """Restriction can also be applied per URL name via IP_RESTRICT_PATH_NAMES."""
        settings.IP_RESTRICT = True
        settings.IP_RESTRICT_PATH_NAMES = ["main:show-bookings"]
        settings.ALLOWED_IPS = allowed_ips
        settings.IP_SAFELIST_XFF_INDEX = -2
        request = rf.get(
            reverse("main:show-bookings"), HTTP_X_FORWARDED_FOR="1.1.1.1, 2.2.2.2, 3.3.3.3"
        )
        assert IpRestrictionMiddleware(dummy_view)(request).status_code == expected_status
| 34.912088
| 98
| 0.657224
| 408
| 3,177
| 4.85049
| 0.166667
| 0.096008
| 0.109146
| 0.063669
| 0.674583
| 0.674583
| 0.66953
| 0.66953
| 0.610915
| 0.576049
| 0
| 0.042828
| 0.220963
| 3,177
| 90
| 99
| 35.3
| 0.756768
| 0.022663
| 0
| 0.424242
| 0
| 0.045455
| 0.100032
| 0.025815
| 0
| 0
| 0
| 0
| 0.106061
| 1
| 0.106061
| false
| 0
| 0.060606
| 0.015152
| 0.19697
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
5505c2fad9d4eaf68b407e24b865a1b9411e4836
| 2,418
|
py
|
Python
|
modelproj/topic.py
|
cesell/modelproj
|
313f89784a19842c866fa2563b326e5d044a2301
|
[
"MIT"
] | null | null | null |
modelproj/topic.py
|
cesell/modelproj
|
313f89784a19842c866fa2563b326e5d044a2301
|
[
"MIT"
] | null | null | null |
modelproj/topic.py
|
cesell/modelproj
|
313f89784a19842c866fa2563b326e5d044a2301
|
[
"MIT"
] | null | null | null |
import json
import re
from urllib.request import urlopen
'''
The use of objects has various benefits.
1. Better control of context
2. State that can be evaluated
3. Data can be created and then processing can be added
4. Clean interface
'''
class TopicSummarizer():
    """TopicSummarizer - Summarizes a wikipedia entry

    Fetches a two-sentence extract and a thumbnail URL for the topic from
    the MediaWiki API.

    Returns:
        str: [Summary of entry]
    """
    # MediaWiki API URL templates: two-sentence plain extract and page thumbnail.
    TEXT_URL_TMP = 'https://en.wikipedia.org/w/api.php?action=query&prop=extracts&exsentences=2&titles={title}&format=json'
    THUMB_URL_TMP = 'https://en.wikipedia.org/w/api.php?action=query&prop=pageimages&titles={title}&format=json'
    def __init__(self, topic):
        self.topic = str(topic)
    def process(self):
        """Fetch the extract and the thumbnail; returns self for chaining."""
        self._fetch_text()
        self._fetch_thumbnail()
        return self
    def get_results(self, as_text=False):
        """Return a plain-text summary, or a TopicSummary when as_text is False.

        Must be called after process().
        """
        if as_text:
            return self.topic + ' summary: ' + self._text
        return TopicSummary(self.topic, self._thumb_url, self._text)
    def _fetch_text(self):
        # The API nests pages under ids we don't know up front, so take the
        # first (and only) page's 'extract'.
        self._text_api_url = self.TEXT_URL_TMP.format(title=self.topic)
        self._text_resp = self._get_url_json(self._text_api_url)
        self._text = list(self._text_resp['query']['pages'].values())[
            0]['extract']
    def _fetch_thumbnail(self):
        # Same nesting; raises KeyError if the page has no thumbnail.
        self._thumb_api_url = self.THUMB_URL_TMP.format(title=self.topic)
        self._thumb_resp = self._get_url_json(self._thumb_api_url)
        self._thumb_url = list(self._thumb_resp['query']['pages'].values())[0][
            'thumbnail']['source']
    def _get_url_json(self, url):
        # NOTE(review): no timeout or HTTP error handling on this request —
        # confirm callers tolerate network exceptions propagating.
        resp = urlopen(url)
        resp_body = resp.read()
        return json.loads(resp_body)
class TopicSummary():
    """Value object for a summarized topic: name, thumbnail URL, cleaned text."""
    def __init__(self, topic, thumb_url, text):
        self.topic = topic
        self.thumb_url = thumb_url
        # Strip simple HTML tags such as <p> or </b> from the extract.
        self.text = re.sub(r'</*.>', '', text)
    def __repr__(self):
        cls_name = self.__class__.__name__
        return '{}({!r}, {!r}, {!r})'.format(cls_name, self.topic, self.thumb_url, self.text)
def main():
    """CLI entry point: summarize the topic passed via -t/--topic."""
    from argparse import ArgumentParser
    parser = ArgumentParser(description='summarize topics from Wikipedia')
    parser.add_argument('-t', '--topic', help='the target topic', required='True')
    cli_args = parser.parse_args()
    summary = TopicSummarizer(cli_args.topic).process().get_results(as_text=True)
    print(summary)
    return
if __name__ == '__main__':
    main()
| 30.607595
| 124
| 0.63689
| 317
| 2,418
| 4.564669
| 0.343849
| 0.055287
| 0.044921
| 0.037319
| 0.270214
| 0.241189
| 0.185902
| 0.109191
| 0.064962
| 0.064962
| 0
| 0.003788
| 0.235732
| 2,418
| 78
| 125
| 31
| 0.779221
| 0.035153
| 0
| 0
| 0
| 0.042553
| 0.161306
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.191489
| false
| 0
| 0.085106
| 0
| 0.489362
| 0.021277
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
550824fc3e2f47ccef32bd1ac78448a3f415ba0f
| 4,154
|
py
|
Python
|
wanderingpole/classifyingtweets/train_wandering_old.py
|
ssdorsey/wandering-pole
|
606ad8f1979354e01dea1acf01107b88b3b9e91b
|
[
"MIT"
] | null | null | null |
wanderingpole/classifyingtweets/train_wandering_old.py
|
ssdorsey/wandering-pole
|
606ad8f1979354e01dea1acf01107b88b3b9e91b
|
[
"MIT"
] | null | null | null |
wanderingpole/classifyingtweets/train_wandering_old.py
|
ssdorsey/wandering-pole
|
606ad8f1979354e01dea1acf01107b88b3b9e91b
|
[
"MIT"
] | null | null | null |
import pandas as pd
import numpy as np
import json
from simpletransformers.classification import ClassificationModel, ClassificationArgs
import sklearn
from sklearn.model_selection import train_test_split
import torch
import re
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import os
from tqdm import tqdm
# Train a BERTweet incivility classifier on several merged tweet batches,
# then evaluate on a held-out split.  Paths are machine-specific.
np.random.seed(2)
# import the data
# tweets = pd.read_csv('data/postIR_final.csv')
# os.chdir('..')
tweets = pd.read_csv('D:/Dropbox/Twitter/training_data/training_final.csv', encoding='latin1')
# restrict to tweets with coding
tweets = tweets[tweets['uncivil_final'].isin([0,1])]
# subset to just text and labels, fix columns names
tweets = tweets.loc[:, ['text', 'uncivil_final']]
tweets.columns = ['text', 'labels']
# import other batch
mike = pd.read_excel(r'D:\Dropbox\wandering-pole\wanderingpole\data\new_pull_Michael.xls')
mike = mike[['full_text', 'uncivil']]
mike = mike.rename(columns={'full_text': 'text', 'uncivil': 'labels'})
# extra
mike_extra = pd.read_csv(r'D:\Dropbox\wandering-pole\wanderingpole\data\michael_extra.csv')
mike_extra = mike_extra.rename(columns={'full_text': 'text', 'uncivil': 'labels'})
# pull a bunch of old 0's (fixed-size sample of negatives from the old model's data)
old_model = pd.read_csv("D:/Dropbox/Twitter/allMCtweets.csv", encoding='latin1')
old_0 = old_model[old_model['polarizing']==0].sample(7432, random_state=619)
old_0 = old_0[['text']]
old_0['labels'] = 0
# combine the new data
tweets = pd.concat([tweets, mike, mike_extra, old_0])
# drop incomplete data
tweets = tweets[tweets['labels'].isin([0,1])]
# drop duplicates
tweets = tweets.drop_duplicates(subset=['text'])
# delete links
# TODO: convert emoticons
re_url = r"(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:'\".,<>?«»“”‘’]))"
tweets['text'] = tweets['text'].replace(re_url, '', regex=True)
# remove retweet header
re_retweet = r"RT\s@\w+:"
tweets['text'] = tweets['text'].replace(re_retweet, '', regex=True)
# double-check for weird excel handling of ampersand
# NOTE(review): this pattern removes every '&' character, not just an escaped
# entity — confirm that is intended.
re_amp = r'&'
tweets['text'] = tweets['text'].replace(re_amp, '', regex=True)
# split train/test (85/15 random split)
# tweets.loc[: , 'split'] = np.random.choice(['train','validate','test'], len(tweets), p=[.85, .15])
# train = tweets.loc[tweets.split=='train']
# validate = tweets.loc[tweets.split=='validate']
# test = tweets.loc[tweets.split=='test']
tweets.loc[: , 'split'] = np.random.choice(['train','test'], len(tweets), p=[.85, .15])
train = tweets.loc[tweets.split=='train']
test = tweets.loc[tweets.split=='test']
# build / train
# weights: inverse-frequency class weights scaled by 10 (computed but unused —
# the hard-coded weight list below is passed to the model instead)
counts = train['labels'].value_counts().sort_index()
weights = [(1-(ii/len(train)))*10 for ii in counts]
model_args = ClassificationArgs()
# model_args.use_early_stopping = True
# model_args.early_stopping_delta = 0.01
# model_args.early_stopping_metric = "mcc"
# model_args.early_stopping_metric_minimize = False
# model_args.early_stopping_patience = 5
# model_args.evaluate_during_training_verbose = True
# model_args.evaluate_during_training_steps = 1000
model_args.output_dir = r'Model_berttweet/'
model_args.cache_dir = r'Model_berttweet/'
model_args.overwrite_output_dir = True
model_args.training_batch_size = 1024
model_args.eval_batch_size = 1024
model_args.num_train_epochs = 5
model = ClassificationModel(
    'bertweet'
    , 'vinai/bertweet-base'
    , num_labels=len(tweets['labels'].unique())
    # , weight=weights # DO help
    , weight=[.8,10]
    , use_cuda=True
    , args=model_args
)
model.train_model(train)
# Evaluate the model on the held-out split
# model = ClassificationModel('bertweet'
#                             , 'Model_berttweet/'
#                             , num_labels=2
#                             , args={'eval_batch_size':512})
result, model_outputs, wrong_predictions = model.eval_model(test)
y_t = list(test.labels)
# argmax over the raw output scores gives the predicted class per tweet
y_hat = [np.argmax(a) for a in model_outputs]
print(sklearn.metrics.classification_report(y_true=y_t, y_pred=y_hat))
sklearn.metrics.confusion_matrix(y_true=y_t, y_pred=y_hat)
# put out the results
test.loc[:, 'predicted'] = y_hat
| 32.708661
| 194
| 0.684401
| 587
| 4,154
| 4.660988
| 0.32879
| 0.049342
| 0.027412
| 0.03655
| 0.28326
| 0.218202
| 0.130117
| 0.07383
| 0.035088
| 0.035088
| 0
| 0.017549
| 0.135773
| 4,154
| 126
| 195
| 32.968254
| 0.744011
| 0.287434
| 0
| 0
| 0
| 0.016129
| 0.229374
| 0.12975
| 0
| 0
| 0
| 0.007937
| 0
| 1
| 0
| false
| 0
| 0.193548
| 0
| 0.193548
| 0.016129
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
5508e6dcf9a3120fc2d2a1fa35c2fb918cef92fb
| 3,339
|
py
|
Python
|
Source/common.py
|
joaohenggeler/twitch-chat-highlights
|
826cda239de2e5185266a04c12a8909ae5f98a3b
|
[
"MIT"
] | null | null | null |
Source/common.py
|
joaohenggeler/twitch-chat-highlights
|
826cda239de2e5185266a04c12a8909ae5f98a3b
|
[
"MIT"
] | null | null | null |
Source/common.py
|
joaohenggeler/twitch-chat-highlights
|
826cda239de2e5185266a04c12a8909ae5f98a3b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""
A module that defines any general purpose functions used by all scripts, including loading configuration files,
connecting to the database, and handling Twitch's timestamp formats.
"""
import json
import sqlite3
from datetime import datetime
from typing import Tuple, Union
####################################################################################################
class CommonConfig():
    """Configuration shared by all scripts, loaded from `config.json`;
    also owns the SQLite connection/schema setup."""
    # From the config file.
    json_config: dict
    client_id: str
    access_token: str
    database_filename: str
    def __init__(self):
        # Load config.json from the working directory and expose every key
        # under its "common" section as an instance attribute.
        with open('config.json') as file:
            self.json_config = json.load(file)
        self.__dict__.update(self.json_config['common'])
    def connect_to_database(self) -> sqlite3.Connection:
        """Open the configured SQLite database in autocommit mode, apply
        performance pragmas, and create the schema if it is missing."""
        db = sqlite3.connect(self.database_filename, isolation_level=None)
        # Rows behave like dicts (access columns by name).
        db.row_factory = sqlite3.Row
        # WAL journal + relaxed sync + in-memory temp storage for throughput.
        db.execute('''PRAGMA journal_mode = WAL;''')
        db.execute('''PRAGMA synchronous = NORMAL;''')
        db.execute('''PRAGMA temp_store = MEMORY;''')
        db.execute('''
        CREATE TABLE IF NOT EXISTS 'Channel'
        (
        'Id' INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
        'Name' VARCHAR(50) NOT NULL UNIQUE
        );
        ''')
        db.execute('''
        CREATE TABLE IF NOT EXISTS 'Video'
        (
        'Id' INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
        'ChannelId' INTEGER NOT NULL,
        'TwitchId' VARCHAR(50) NOT NULL UNIQUE,
        'Title' TEXT NOT NULL,
        'CreationTime' TIMESTAMP NOT NULL,
        'Duration' TIME NOT NULL,
        'YouTubeId' VARCHAR(50) UNIQUE,
        'Notes' TEXT,
        FOREIGN KEY (ChannelId) REFERENCES Channel (Id)
        );
        ''')
        # VideoId can be NULL when we're storing messages from a live stream, meaning there's no VOD yet.
        db.execute('''
        CREATE TABLE IF NOT EXISTS 'Chat'
        (
        'Id' INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
        'ChannelId' INTEGER NOT NULL,
        'VideoId' INTEGER,
        'Timestamp' TIMESTAMP NOT NULL,
        'Message' TEXT NOT NULL,
        FOREIGN KEY (ChannelId) REFERENCES Channel (Id),
        FOREIGN KEY (VideoId) REFERENCES Video (Id)
        );
        ''')
        return db
####################################################################################################
def split_twitch_duration(duration: str) -> Tuple[int, int, int, int]:
    """Parse a Twitch duration string ('00h00m00s', '00m00s' or '00s').

    Returns (hours, minutes, seconds, total_seconds); fields that are
    absent from the string default to zero.
    """
    # Turn the unit letters into ':' separators, e.g. '1h02m03s' -> '1:02:03'.
    normalized = duration.replace('h', ':').replace('m', ':').replace('s', '')
    parts = normalized.split(':', 2)
    hours = minutes = seconds = 0
    if len(parts) >= 1:
        seconds = int(parts[-1])
    if len(parts) >= 2:
        minutes = int(parts[-2])
    if len(parts) >= 3:
        hours = int(parts[-3])
    return hours, minutes, seconds, hours * 3600 + minutes * 60 + seconds
def convert_twitch_timestamp_to_datetime(timestamp: str) -> datetime:
    """Parse Twitch's ISO-8601 timestamps into an aware datetime (UTC).

    Twitch emits 'YYYY-MM-DDThh:mm:ss[.fraction]Z' with a fractional part of
    varying precision (0, 1, 2, 3 or 9 digits observed).  The fraction is
    truncated/padded to exactly 6 digits so fromisoformat() accepts it.
    """
    if '.' in timestamp:
        base, fraction = timestamp.rsplit('.', 1)
        digits, _ = fraction.rsplit('Z', 1)
        timestamp = '{}.{}Z'.format(base, digits[:6].ljust(6, '0'))
    # fromisoformat() does not understand the 'Z' suffix; spell out the offset.
    return datetime.fromisoformat(timestamp.replace('Z', '+00:00'))
| 30.081081
| 112
| 0.632824
| 411
| 3,339
| 5.068127
| 0.413625
| 0.040326
| 0.033605
| 0.031205
| 0.209313
| 0.159386
| 0.1229
| 0.05953
| 0.05953
| 0.05953
| 0
| 0.016987
| 0.188979
| 3,339
| 111
| 113
| 30.081081
| 0.752216
| 0.180593
| 0
| 0.202899
| 0
| 0
| 0.417958
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057971
| false
| 0
| 0.057971
| 0
| 0.231884
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
550ab4d7e165fcec5a4c9ed00a1fc8a3d4f624ba
| 986
|
py
|
Python
|
unicodetest.py
|
conradstorz/SpeedTrivia
|
e222831223c704f5bb169d4c2d475c9e2a8c4c08
|
[
"Apache-2.0"
] | null | null | null |
unicodetest.py
|
conradstorz/SpeedTrivia
|
e222831223c704f5bb169d4c2d475c9e2a8c4c08
|
[
"Apache-2.0"
] | 1
|
2021-04-26T22:47:19.000Z
|
2021-04-26T22:47:19.000Z
|
unicodetest.py
|
conradstorz/SpeedTrivia
|
e222831223c704f5bb169d4c2d475c9e2a8c4c08
|
[
"Apache-2.0"
] | null | null | null |
from unidecode import unidecode
from unicodedata import name
import ftfy
# Scan the BMP code points and print those whose Unicode name contains
# 'mark', alongside their ftfy-normalized and unidecode ASCII forms.
for i in range(33, 65535):
    # NOTE(review): this guard is unreachable — the loop stops at 65534,
    # well below 0xEFFFF.
    if i > 0xEFFFF:
        continue  # Characters in Private Use Area and above are ignored
    # Skip the UTF-16 surrogate range (not valid standalone characters).
    if 0xD800 <= i <= 0xDFFF:
        continue
    h = hex(i)
    u = chr(i)
    # NFKC-normalize via ftfy; may expand one char into several.
    f = ftfy.fix_text(u, normalization="NFKC")
    a = unidecode(u)
    # Only consider characters with a usable ASCII transliteration.
    if a != "[?]" and len(u) != 0 and len(a) != 0 and len(f) != 0:
        new_char = ""
        if u != f:
            # List the code points of the normalized form, comma-separated.
            for c in list(f):
                new_char += "{}, ".format(ord(c))
            new_char = new_char[:-2]
        else:
            new_char = "Same"
        try:
            txt = name(u).lower()
            # print(txt)
            if 'mark' in txt:
                print(
                    f"dec={i} hex={h} unicode_chr={u} ftfy_chr(s)={f} ftfy_dec={new_char}\n",
                    f"ascii_chr={a} uni_len={len(u)} ascii_len={len(a)} unicode_name={name(u)}"
                )
        except ValueError:
            # unicodedata.name() raises for unnamed code points; skip them.
            pass
| 30.8125
| 95
| 0.485801
| 136
| 986
| 3.419118
| 0.441176
| 0.090323
| 0.030108
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027642
| 0.376268
| 986
| 31
| 96
| 31.806452
| 0.728455
| 0.063895
| 0
| 0.068966
| 0
| 0.068966
| 0.173913
| 0.046739
| 0
| 0
| 0.020652
| 0
| 0
| 1
| 0
| false
| 0.034483
| 0.103448
| 0
| 0.103448
| 0.034483
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
550c04ca9a44d927c37f244ee230dced2cf832ce
| 4,387
|
py
|
Python
|
app/weibo/views.py
|
guoweikuang/weibo_project
|
38cb2a6d72a16f2f8c1714e83564c833f8e4af0c
|
[
"Apache-2.0"
] | 4
|
2019-03-25T08:47:22.000Z
|
2021-03-16T02:39:29.000Z
|
app/weibo/views.py
|
guoweikuang/weibo_project
|
38cb2a6d72a16f2f8c1714e83564c833f8e4af0c
|
[
"Apache-2.0"
] | 1
|
2020-01-06T03:37:46.000Z
|
2020-01-06T03:37:46.000Z
|
app/weibo/views.py
|
guoweikuang/weibo_project
|
38cb2a6d72a16f2f8c1714e83564c833f8e4af0c
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
~~~~~~~~~~~~~~~~~~~~~~
main module
#author guoweikuang
"""
from flask import render_template
from flask import redirect
from flask import url_for
from flask import request
from flask_login import login_required
from pyecharts import Bar
from pyecharts.utils import json_dumps
#from pyecharts import json_dumps
import json
from . import weibo
from .forms import CrawlForm
from .forms import AnalyzeForm
from app import run_async_crawl
from app import run_build_vsm
from ..utils import filter_data
from ..utils import run_k_means
from ..utils import classify_k_cluster
from ..utils import get_mysql_content
from ..utils import get_mysql_opinion
from ..utils import run_old_all_process
from ..utils import get_max_hot_keyword_chart
from ..utils import list_top_hot_topic
from ..utils import get_hot_text_from_category
from ..utils import bar_chart
REMOTE_HOST = "https://pyecharts.github.io/assets/js"
@weibo.route('/', methods=['GET', 'POST'])
def index():
    """Home page: list hot topics and the texts of the selected topic."""
    rows = list_top_hot_topic(db=1)
    categorys = [cate[0] for cate in rows]
    # Topic selected by the user via query/form parameter, if any.
    category = request.values.get('topic')
    if category:
        # Move the selected topic to the front so the template shows it first.
        categorys.remove(category)
        categorys.insert(0, category)
    else:
        # Default to the first (hottest) topic.
        category = categorys[0]
    results = get_hot_text_from_category(category, db=0)
    return render_template('weibo/index.html', rows=rows, categorys=categorys, contents=results)
@weibo.route('/crawl', methods=['GET', 'POST'])
@login_required
def crawl():
    """Crawl page: show recent crawl content and trigger a new crawl on submit."""
    crawl_form = CrawlForm()
    result = get_mysql_content(days=1)
    if crawl_form.validate_on_submit():
        # Kick off the asynchronous crawl for its side effect; the return
        # value was never rendered because we redirect (POST/redirect/GET).
        run_async_crawl(start_page=crawl_form.start_page.data,
                        end_page=crawl_form.end_page.data)
        return redirect(url_for('weibo.crawl'))
    return render_template('weibo/crawl.html', form=crawl_form, results=result)
@weibo.route('/analyze', methods=['GET', 'POST'])
@login_required
def analyze():
    """Clustering analysis page.

    On submit, run the full analysis pipeline (run_old_all_process) over
    the requested time range and cluster count, then redirect to the
    chart page.
    """
    analyze_form = AnalyzeForm()
    if analyze_form.validate_on_submit():
        run_old_all_process(start_time=analyze_form.start_time.data,
                            end_time=analyze_form.end_time.data,
                            k=analyze_form.k_cluster.data)
        return redirect(url_for("weibo.display"))
    return render_template('weibo/analyze.html', form=analyze_form)
@weibo.route('/display', methods=['GET', 'POST'])
@login_required
def display():
    """Chart display page: keyword list and image for the hottest topic."""
    keywords, img_name, rows, category = get_max_hot_keyword_chart(db=1)
    img_path = "images/%s/%s" % (category, img_name)
    # NOTE(review): descending sort followed by reversal yields ascending
    # order but with ties reversed; kept as-is to preserve the exact order.
    results = sorted(keywords.items(), key=lambda d: d[1], reverse=True)[::-1]
    # Redis-style byte keys are decoded for the template.
    keywords = [key.decode('utf-8') for key, value in results]
    rows = [row.split('\t') for row in rows]
    return render_template('weibo/display.html',
                           img_name=img_path,
                           keywords=keywords,
                           rows=rows)
@weibo.route('/sensitive', methods=['GET', 'POST'])
@login_required
def sensitive():
    """Sensitive-word monitoring page.

    Shows opinion records for the selected category and moves that
    category to the front of the fixed category list.
    """
    results = get_mysql_opinion()
    # Fixed categories; keys are Chinese because the stored data is.
    opinion = ['心理健康', '社会突发事件', '校园安全', '反动言论']
    sen_type = request.values.get("category")
    if not sen_type:
        rows = results[opinion[0]]
    else:
        opinion.remove(sen_type)
        opinion.insert(0, sen_type)
        rows = results[sen_type]
    return render_template('weibo/sensitive.html', rows=rows, categorys=opinion)
@weibo.route('/pyecharts', methods=['GET', 'POST'])
@login_required
def show_chart():
    """Render a demo pyecharts bar chart."""
    bar = bar_chart()
    context = {
        'chart_id': bar.chart_id,
        'host': REMOTE_HOST,
        'my_width': '100%',
        'my_height': 600,
        'my_option': json_dumps(bar.options),
        'script_list': bar.get_js_dependencies(),
    }
    return render_template('pyecharts.html', **context)
| 29.843537
| 96
| 0.641213
| 555
| 4,387
| 4.834234
| 0.25045
| 0.045099
| 0.055908
| 0.04659
| 0.209467
| 0.149832
| 0.076034
| 0.076034
| 0.076034
| 0.046217
| 0
| 0.005958
| 0.234785
| 4,387
| 146
| 97
| 30.047945
| 0.793268
| 0.085708
| 0
| 0.092784
| 0
| 0
| 0.077041
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061856
| false
| 0
| 0.237113
| 0
| 0.381443
| 0.010309
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
550fca2bdb3d0148522e4c33f7841bfbb8e59b80
| 3,109
|
py
|
Python
|
remote-access/remote_connect.py
|
sag-tgo/thin-edge.io_examples
|
7da43f330b640d48c2b0f3be2594ff85fe5c9dfe
|
[
"Apache-2.0"
] | 3
|
2021-06-07T19:11:23.000Z
|
2022-02-03T16:20:27.000Z
|
remote-access/remote_connect.py
|
sag-tgo/thin-edge.io_examples
|
7da43f330b640d48c2b0f3be2594ff85fe5c9dfe
|
[
"Apache-2.0"
] | 5
|
2021-11-04T09:44:36.000Z
|
2022-03-30T22:19:11.000Z
|
remote-access/remote_connect.py
|
sag-tgo/thin-edge.io_examples
|
7da43f330b640d48c2b0f3be2594ff85fe5c9dfe
|
[
"Apache-2.0"
] | 11
|
2021-06-16T14:04:01.000Z
|
2022-03-17T08:29:54.000Z
|
import logging
from c8ydp.device_proxy import DeviceProxy, WebSocketFailureException
from threading import Thread
import threading
from c8yMQTT import C8yMQTT
import concurrent.futures
import os
# Log to the root handler at INFO with timestamp, logger name and message.
logging.basicConfig(level=logging.INFO,format='%(asctime)s %(name)s %(message)s')
logger = logging.getLogger(__name__)
def setCommandExecuting(command):
    """Publish '501,<command>' (operation executing) on c8y/s/us."""
    logger.info(f'Setting command: {command} to executing')
    c8y.publish('c8y/s/us', f'501,{command}')
def setCommandSuccessfull(command):
    """Publish '503,<command>' (operation successful) on c8y/s/us."""
    logger.info(f'Setting command: {command} to successful')
    c8y.publish('c8y/s/us', f'503,{command}')
def setCommandFailed(command, errorMessage):
    """Publish '502,<command>,<errorMessage>' (operation failed) on c8y/s/us."""
    logger.info(f'Setting command: {command} to failed cause: {errorMessage}')
    c8y.publish('c8y/s/us', f'502,{command},{errorMessage}')
def on_message(client, obj, msg):
    """MQTT message callback.

    Handles two message types:
      * '71,<token>'  -- refreshed JWT token; stored on the c8y client.
      * '530,...'     -- remote access connect request; opens a device
        proxy and reports the operation status back.

    NOTE(review): relies on module globals ``c8y`` and ``url`` being
    initialised by the start-up code before messages arrive — confirm.
    """
    message = msg.payload.decode('utf-8')
    logger.debug("Message Received: " + msg.topic + " " + str(msg.qos) + " " + message)
    if message.startswith('71'):
        # Token refresh: field 1 carries the new JWT.
        fields = message.split(",")
        c8y.token = fields[1]
        logger.info('New JWT Token received')
    if message.startswith('530'):
        # Remote connect request: fields[2]=host, fields[3]=port,
        # fields[4]=connection key.
        fields = message.split(",")
        tcp_host = fields[2]
        tcp_port = int(fields[3])
        connection_key = fields[4]
        c8y.logger.info('Received Remote Connect.')
        setCommandExecuting('c8y_RemoteAccessConnect')
        # Run the blocking proxy in a worker thread; future.result() blocks
        # this callback until the remote session ends.
        with concurrent.futures.ThreadPoolExecutor() as executor:
            future = executor.submit(remoteConnect,tcp_host,tcp_port,connection_key,'https://'+url )
            return_value = future.result()
            c8y.logger.info('Remote Connect Result:' + return_value)
            # remoteConnect returns 'success' or the stringified exception.
            if return_value.startswith('success'):
                setCommandSuccessfull('c8y_RemoteAccessConnect')
            else:
                setCommandFailed('c8y_RemoteAccessConnect',return_value)
def remoteConnect(tcp_host, tcp_port, connection_key, base_url):
    """Open a device proxy to the given host/port and run it to completion.

    Returns 'success' when the connection finished normally, otherwise
    the stringified exception.
    """
    try:
        c8y.logger.info(f'Starting Remote to: {tcp_host}:{tcp_port} Key: {connection_key} url: {base_url}')
        proxy = DeviceProxy(tcp_host, tcp_port, None, connection_key,
                            base_url, None, None, c8y.token)
        proxy.connect()
        logger.info('Remote Connection successfull finished')
        return 'success'
    except Exception as e:
        logger.error('Remote Connection error:' + str(e))
        return str(e)
# --- module start-up: connect to the local broker and register ---
# Ask thin-edge for the configured Cumulocity tenant URL.
stream = os.popen('sudo tedge config get c8y.url')
url=stream.read().strip()
logger.info('Got tenant URL: '+ url)
# Connect to the local MQTT broker and subscribe to the listed topics;
# on_message handles token refreshes and remote connect requests.
c8y = C8yMQTT('remote_connect','localhost',1883,'c8y/s/ds,c8y/s/e,c8y/s/dt,c8y/s/dat')
connected = c8y.connect(on_message)
logger.info('Connection Result:' + str(connected))
if connected != 0:
    # Non-zero result means the broker connection failed; abort.
    logger.error('Connection not possible: ' + str(connected))
    exit()
# Advertise the supported operation ('114,c8y_RemoteAccessConnect').
c8y.publish("c8y/s/us", "114,c8y_RemoteAccessConnect")
| 38.382716
| 145
| 0.620135
| 344
| 3,109
| 5.505814
| 0.357558
| 0.052798
| 0.027455
| 0.029567
| 0.135692
| 0.101901
| 0.084477
| 0
| 0
| 0
| 0
| 0.023656
| 0.252171
| 3,109
| 80
| 146
| 38.8625
| 0.790968
| 0
| 0
| 0.073529
| 0
| 0.014706
| 0.203412
| 0.042163
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073529
| false
| 0
| 0.102941
| 0
| 0.205882
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
550fdb4e80b863ed65bbb7d6dee920e010a04788
| 1,397
|
py
|
Python
|
Homework 1/question_solutions/question_3_convergence.py
|
rukmal/FE-621-Homework
|
9c7cef7931b58aed54867acd8e8cf1928bc6d2dd
|
[
"MIT"
] | 4
|
2020-04-29T04:34:50.000Z
|
2021-11-11T07:49:08.000Z
|
Homework 1/question_solutions/question_3_convergence.py
|
rukmal/FE-621-Homework
|
9c7cef7931b58aed54867acd8e8cf1928bc6d2dd
|
[
"MIT"
] | null | null | null |
Homework 1/question_solutions/question_3_convergence.py
|
rukmal/FE-621-Homework
|
9c7cef7931b58aed54867acd8e8cf1928bc6d2dd
|
[
"MIT"
] | 1
|
2020-04-23T07:32:44.000Z
|
2020-04-23T07:32:44.000Z
|
from context import fe621
import numpy as np
import pandas as pd
def convergenceSegmentLimit():
    """Compute the number of segments required for convergence of the
    trapezoidal and Simpson's quadrature rules on sin(x)/x, and save
    the estimated areas and segment counts to a CSV file.
    """

    # Objective function: sin(x)/x with the removable singularity at
    # x = 0 patched to its limit value 1.
    def f(x):
        # np.where evaluates both branches, so silence the harmless 0/0
        # warning produced while computing sin(0)/0 before it is discarded.
        with np.errstate(divide='ignore', invalid='ignore'):
            return np.where(x == 0.0, 1.0, np.sin(x) / x)

    # Target tolerance level for terminating the approximation.
    epsilon = 1e-3

    # Using Trapezoidal rule
    trapezoidal_result = fe621.numerical_integration.convergenceApproximation(
        f=f,
        rule=fe621.numerical_integration.trapezoidalRule,
        epsilon=epsilon
    )

    # Using Simpson's rule
    simpsons_result = fe621.numerical_integration.convergenceApproximation(
        f=f,
        rule=fe621.numerical_integration.simpsonsRule,
        epsilon=epsilon
    )

    # Building DataFrame of absolute results for output.
    results = pd.DataFrame(np.abs(np.array([trapezoidal_result,
                                            simpsons_result])))

    # Setting row and column names
    results.columns = ['Estimated Area', 'Segments']
    results.index = ['Trapezoidal Rule', 'Simpson\'s Rule']

    # Saving to CSV in scientific notation.
    results.to_csv('Homework 1/bin/numerical_integration/convergence.csv',
                   header=True, index=True, float_format='%.8e')
# Script entry point: run the convergence study when executed directly.
if __name__ == '__main__':
    # Part 3 - Convergence Analysis
    convergenceSegmentLimit()
| 28.510204
| 78
| 0.662133
| 155
| 1,397
| 5.845161
| 0.516129
| 0.110375
| 0.110375
| 0.068433
| 0.189845
| 0.189845
| 0.189845
| 0.189845
| 0.189845
| 0.189845
| 0
| 0.022879
| 0.249105
| 1,397
| 48
| 79
| 29.104167
| 0.840801
| 0.23121
| 0
| 0.16
| 0
| 0
| 0.104563
| 0.040875
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08
| false
| 0
| 0.12
| 0.04
| 0.24
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
55169c728f2c5da43dc85a640e3450e734567aeb
| 20,482
|
py
|
Python
|
opusfilter/filters.py
|
BrightXiaoHan/OpusFilter
|
804c82a46837fc57ca69934314622043248f6042
|
[
"MIT"
] | null | null | null |
opusfilter/filters.py
|
BrightXiaoHan/OpusFilter
|
804c82a46837fc57ca69934314622043248f6042
|
[
"MIT"
] | null | null | null |
opusfilter/filters.py
|
BrightXiaoHan/OpusFilter
|
804c82a46837fc57ca69934314622043248f6042
|
[
"MIT"
] | null | null | null |
"""Corpus filtering"""
import difflib
import itertools
import logging
import math
import os
import string
from typing import Iterator, List, Tuple
import rapidfuzz
import regex
from langid.langid import LanguageIdentifier, model
import pycld2
from bs4 import BeautifulSoup as bs
import fasttext
from . import FilterABC, ConfigurationError
from .util import check_args_compability
from .lm import CrossEntropyFilter, CrossEntropyDifferenceFilter, LMClassifierFilter # pylint: disable=W0611 # noqa: F401
from .word_alignment import WordAlignFilter # pylint: disable=W0611 # noqa: F401
from .embeddings import SentenceEmbeddingFilter # pylint: disable=W0611 # noqa: F401
logger = logging.getLogger(__name__)
class LengthFilter(FilterABC):
    """Filter sentence pairs on segment length.

    Lengths are measured in words or characters per language; a pair
    is accepted when every segment's length lies within the per-index
    [min_length, max_length] range. If ``pass_empty`` is true, pairs
    whose segments are all empty are accepted unconditionally.
    """

    def __init__(self, min_length=1, max_length=100, unit='word', pass_empty=False, **kwargs):
        # Normalize scalar arguments into per-language sequences and
        # validate types/choices.
        min_length, max_length, unit = check_args_compability(
            min_length, max_length, unit,
            required_types=[int, int, str],
            choices=[None, None, ('word', 'char', 'character')],
            names=['min_length', 'max_length', 'unit'])
        self.min_length = min_length
        self.max_length = max_length
        self.unit = unit
        self.pass_empty = pass_empty
        super().__init__(**kwargs)

    def get_length(self, segment, idx):
        """Return the length of ``segment`` using the unit for index ``idx``."""
        return len(segment.split()) if self.unit[idx] == 'word' else len(segment)

    def score(self, pairs):
        """Yield a list of per-segment lengths for each pair."""
        for pair in pairs:
            lengths = []
            for idx, segment in enumerate(pair):
                lengths.append(self.get_length(segment, idx))
            yield lengths

    def accept(self, score):
        """Accept when all lengths are in range (or all segments empty)."""
        if self.pass_empty and not sum(score):
            return True
        return all(self.min_length[idx] <= length <= self.max_length[idx]
                   for idx, length in enumerate(score))
class LengthRatioFilter(FilterABC):
    """Filter pairs whose segment length ratio is too large.

    The score is the longest segment's length divided by the shortest's
    (``inf`` when only the shortest is empty, 0 when all are empty); a
    pair is accepted when the ratio is strictly below ``threshold``.
    """

    def __init__(self, threshold=3, unit='word', **kwargs):
        self.threshold = threshold
        self.unit = check_args_compability(
            unit, required_types=[str], choices=[('word', 'char', 'character')], names=['unit'])
        super().__init__(**kwargs)

    def get_length(self, segment, idx):
        """Return the length of ``segment`` using the unit for index ``idx``."""
        return len(segment.split()) if self.unit[idx] == 'word' else len(segment)

    def score(self, pairs):
        """Yield the max/min length ratio for each pair."""
        for pair in pairs:
            lengths = sorted(self.get_length(segment, idx)
                             for idx, segment in enumerate(pair))
            shortest, longest = lengths[0], lengths[-1]
            if shortest == 0:
                yield 0 if longest == 0 else float('inf')
            else:
                yield longest / shortest

    def accept(self, score):
        """Accept when the ratio is strictly below the threshold."""
        return score < self.threshold
class LongWordFilter(FilterABC):
    """Filter out pairs containing words longer than a per-language threshold."""

    def __init__(self, threshold=40, **kwargs):
        self.threshold = check_args_compability(threshold, required_types=[int], names=['threshold'])
        super().__init__(**kwargs)

    def score(self, pairs):
        """Yield, per pair, the longest word length of each segment (0 if empty)."""
        for pair in pairs:
            yield [max((len(word) for word in segment.split()), default=0)
                   for segment in pair]

    def accept(self, score):
        """Accept when every segment's longest word is below its threshold."""
        for idx, length in enumerate(score):
            if length >= self.threshold[idx]:
                return False
        return True
class AverageWordLengthFilter(FilterABC):
    """Filter on the average word length of each segment.

    Empty segments score zero. If ``pass_empty`` is true, pairs whose
    segments are all empty are accepted unconditionally.
    """

    def __init__(self, min_length=2, max_length=20, pass_empty=False, **kwargs):
        min_length, max_length = check_args_compability(
            min_length, max_length, required_types=[int, int], names=['min_length', 'max_length'])
        self.min_length = min_length
        self.max_length = max_length
        self.pass_empty = pass_empty
        super().__init__(**kwargs)

    @staticmethod
    def _average_word_len(sentence):
        """Mean number of characters per word; 0 for an empty sentence."""
        words = sentence.split()
        if not words:
            return 0
        return sum(len(word) for word in words) / len(words)

    def score(self, pairs):
        """Yield per-segment average word lengths for each pair."""
        for pair in pairs:
            yield [self._average_word_len(sent) for sent in pair]

    def accept(self, score):
        """Accept when all averages are in range (or all segments empty)."""
        if self.pass_empty and not sum(score):
            return True
        return all(self.min_length[idx] <= length <= self.max_length[idx]
                   for idx, length in enumerate(score))
class HtmlTagFilter(FilterABC):
    """Filter out segments that contain HTML tags."""

    # NOTE: the redundant __init__ that only forwarded to super() was removed;
    # the inherited constructor is used unchanged.

    def check(self, segment):
        """Return True if BeautifulSoup finds a tag in ``segment``.

        Segments that BeautifulSoup cannot parse are treated as
        containing tags (rejected) and a warning is logged.
        """
        try:
            found = bool(bs(segment, 'html.parser').find())
        except TypeError as err:
            logger.warning("BeautifulSoup parsing failed for %s: %s", repr(segment), err)
            found = True
        return found

    def score(self, pairs):
        """Yield per-segment booleans (tag found) for each pair."""
        for pair in pairs:
            yield [self.check(sent) for sent in pair]

    def accept(self, score):
        """Accept only when no segment contains a tag."""
        return not any(score)
class RegExpFilter(FilterABC):
    """Filter segments on whether they match regular expressions.

    One regexp may be given for all languages or one per language; the
    ``regex`` library performs the search. With ``accept_match`` False
    a pair is accepted only when no segment matches its regexp; with
    ``accept_match`` True only when every segment matches.
    """

    def __init__(self, regexps=None, accept_match=False, **kwargs):
        self.regexps = check_args_compability(regexps, required_types=[str], names=['regexps'])
        self.accept_match = accept_match
        super().__init__(**kwargs)

    def score(self, pairs):
        """Yield per-segment booleans (regexp matched) for each pair."""
        for pair in pairs:
            matches = []
            for idx, segment in enumerate(pair):
                matches.append(bool(regex.search(self.regexps[idx], segment)))
            yield matches

    def accept(self, score):
        """Accept according to the ``accept_match`` mode."""
        return all(score) if self.accept_match else not any(score)
class AlphabetRatioFilter(FilterABC):
    """Filter on the proportion of alphabetic characters per segment."""

    def __init__(self, threshold=0.75, exclude_whitespace=False, **kwargs):
        self.threshold = check_args_compability(threshold, required_types=[float], names=['threshold'])
        self.exclude_whitespace = exclude_whitespace
        # Pre-compiled patterns: whitespace, and any non-alphabetic character.
        self.re_whitespace = regex.compile(r'\s')
        self.re_not_alphas = regex.compile(r'\p{Alphabetic=No}')
        super().__init__(**kwargs)

    def score(self, pairs):
        """Yield per-segment alphabetic ratios (1.0 for empty segments)."""
        for pair in pairs:
            ratios = []
            for segment in pair:
                if self.exclude_whitespace:
                    segment = self.re_whitespace.sub('', segment)
                alphas = self.re_not_alphas.sub('', segment)
                ratios.append(len(alphas) / len(segment) if segment else 1.0)
            yield ratios

    def accept(self, score):
        """Accept when every ratio meets its per-language threshold."""
        return all(ratio >= threshold for ratio, threshold in zip(score, self.threshold))
class CharacterScoreFilter(FilterABC):
    """Proportion of alphabetic characters that belong to a given script.

    For a list of valid scripts, see e.g.
    https://www.regular-expressions.info/unicode.html
    """

    def __init__(self, scripts=None, thresholds=None, **kwargs):
        if scripts is None:
            raise ConfigurationError("A list of language scripts needs to be defined")
        self.scripts = scripts
        self.thresholds = [1] * len(scripts) if thresholds is None else thresholds
        if len(self.scripts) != len(self.thresholds):
            raise ConfigurationError(
                f"Mismatch in number of scripts ({len(self.scripts)}) and thresholds ({len(self.thresholds)})")
        # Patterns stripping non-alphabetic chars and chars outside each script.
        self.re_not_alphas = regex.compile(r'\p{Alphabetic=No}')
        self.re_not_script = [regex.compile(fr'\p{{^Script={script}}}')
                              for script in self.scripts]
        super().__init__(**kwargs)

    def score(self, pairs):
        """Yield per-segment in-script ratios (1.0 when no alphabetic chars)."""
        for pair in pairs:
            if len(pair) != len(self.scripts):
                raise ValueError(f"Mismatch in number of scripts ({len(self.scripts)}) and sentences ({len(pair)})")
            ratios = []
            for idx, sent in enumerate(pair):
                alphas = self.re_not_alphas.sub('', sent)
                if not alphas:
                    ratios.append(1.0)
                else:
                    in_script = self.re_not_script[idx].sub('', alphas)
                    ratios.append(len(in_script) / len(alphas))
            yield ratios

    def accept(self, score):
        """Accept when every ratio meets its threshold."""
        return all(ratio >= threshold for ratio, threshold in zip(score, self.thresholds))
class LanguageIDFilter(FilterABC):
    """Language identification confidence filter

    Currently this supports three methods:

    * langid (default): see :cite:`lui-baldwin-2012-langid`
    * cld2: see https://github.com/CLD2Owners/cld2
    * fasttext: see :cite:`joulin-etal-2016-fasttext` and :cite:`joulin-etal-2017-bag`

    """

    def __init__(self, languages=None, id_method='langid', thresholds=None,
                 fasttext_model_path=None, langid_languages=None, cld2_options=None,
                 **kwargs):
        super().__init__(**kwargs)
        if languages is None:
            raise ConfigurationError("A list of language codes needs to be defined")

        # fasttext options: a model path is required for, and only valid
        # with, the fasttext method.
        if id_method == 'fasttext' and not fasttext_model_path:
            raise ConfigurationError("FastText language ID method was choosen without specifying "
                                     "any path to fasttext model")
        if id_method != 'fasttext' and fasttext_model_path:
            raise ConfigurationError("FastText language ID method was not choosen but fasttext "
                                     "path to model was set")
        # Model file is resolved relative to the filter's work directory.
        self.fasttext_model = fasttext.load_model(os.path.join(self.workdir, fasttext_model_path)) \
            if id_method == 'fasttext' else None

        # langid options: optionally restrict the identifier's language set.
        if id_method == 'langid':
            self.identifier = LanguageIdentifier.from_modelstring(model, norm_probs=True)
            if langid_languages:
                self.identifier.set_languages(langid_languages)
        else:
            if langid_languages:
                raise ConfigurationError(
                    "langid_languages option is supported only by the method langid")
            self.identifier = None

        # cld2 options: extra keyword arguments forwarded to pycld2.detect.
        if id_method == 'cld2':
            self.cld2_options = cld2_options if cld2_options else {}
        else:
            if cld2_options:
                raise ConfigurationError("cld2_options is supported only by the method cld2")
            self.cld2_options = None

        # global options
        self.languages = languages
        self.id_method = id_method
        # Default thresholds of 0 accept any detected-language confidence > 0.
        self.thresholds = [0] * len(self.languages) if thresholds is None else thresholds

    def confidence(self, sentence: str, lan: str) -> float:
        """Return the identifier's confidence that ``sentence`` is in
        language ``lan``; 0.0 when a different language is detected."""
        if not sentence:
            # Prevent filtering empty lines
            return 1.0

        if self.id_method == 'cld2':
            try:
                clddetails = pycld2.detect(sentence, **self.cld2_options)
            except pycld2.error as err:
                logger.warning("pycld2 could not process '%s' due to: %s", sentence, err)
                # Fall back to "unknown language, zero confidence".
                clddetails = (0, 0, ((0, 'un', 0.0), 0))
            cldlan = clddetails[2][0][1]
            # pycld2 reports percentages; convert to a 0..1 confidence.
            cldconf = round(clddetails[2][0][2]/100, 2)
            if cldlan != lan:
                cldconf = 0.0
            return cldconf

        if self.id_method == 'langid':
            lidetails = self.identifier.classify(sentence)
            lilan, liconf = lidetails[0], round(lidetails[1], 2)
            if lilan != lan:
                liconf = 0.0
            return liconf

        if self.id_method == 'fasttext':
            lang, confidence = self._fasttext_predict_lang(sentence)
            if lang != lan:
                liconf = 0.0
            else:
                liconf = confidence
            return liconf

        raise ValueError(f"Unknown language identification method '{self.id_method}'")

    def score(self, pairs: List[Tuple[str, str]]) -> Iterator[List[float]]:
        """Yield per-segment confidences for the configured languages."""
        for pair in pairs:
            yield [self.confidence(sent, self.languages[idx]) for idx, sent in enumerate(pair)]

    def accept(self, score: Tuple[float, float]) -> bool:
        """Accept when every confidence is strictly above its threshold."""
        return all(conf > threshold for conf, threshold in zip(score, self.thresholds))

    def _fasttext_predict_lang(self, texts: List[str]) -> Tuple[str, float]:
        """Return (language, confidence) of the top fasttext prediction.

        The [9:] slice strips fasttext's '__label__' prefix from the label.
        """
        output = self.fasttext_model.predict(texts, k=1)
        confidence = output[1][0]
        label = output[0][0][9:]
        return label, confidence
class TerminalPunctuationFilter(FilterABC):
    """Penalty score for mismatched terminal punctuation marks.

    See :cite:`vazquez-etal-2019-university`
    """

    # Terminal punctuation marks counted in each sentence.
    _MARKS = ('.', '?', '!', '…')

    def __init__(self, threshold=-2, **kwargs):
        self.threshold = threshold
        super().__init__(**kwargs)

    def score(self, pairs):
        """Yield -log(1 + penalty) for each bilingual pair."""
        for pair in pairs:
            if len(pair) != 2:
                raise ValueError("Only bilingual input supported by TerminalPunctuationFilter")
            sent1, sent2 = pair
            spun = sum(1 for c in sent1 if c in self._MARKS)
            tpun = sum(1 for c in sent2 if c in self._MARKS)
            # Base penalty: mismatch in counts; extra penalty for more
            # than one mark in either sentence.
            penalty = abs(spun - tpun)
            if spun > 1:
                penalty += spun - 1
            if tpun > 1:
                penalty += tpun - 1
            yield -math.log(penalty + 1)

    def accept(self, score):
        """Accept when the (negative) score is at least the threshold."""
        return score >= self.threshold
class NonZeroNumeralsFilter(FilterABC):
    """Similarity of the non-zero digit sequences of the segments.

    If require_all is True, all scores (for pairs of n segments) have
    to be equal or above the threshold; otherwise at least one score
    has to reach it. For bilingual input there is a single score, so
    the flag has no effect.

    See :cite:`vazquez-etal-2019-university`
    """

    def __init__(self, threshold=0.5, require_all=True, **kwargs):
        self.threshold = threshold
        self.require_all = require_all
        super().__init__(**kwargs)

    def score(self, pairs):
        """Yield SequenceMatcher ratios of non-zero digit sequences."""
        for pair in pairs:
            digit_seqs = []
            for sent in pair:
                digit_seqs.append([int(c) for c in sent
                                   if c in string.digits and c != '0'])
            yield [difflib.SequenceMatcher(None, seq_a, seq_b).ratio()
                   for seq_a, seq_b in itertools.combinations(digit_seqs, 2)]

    def accept(self, score):
        """Accept according to the require_all policy."""
        check = all if self.require_all else any
        return check(ratio >= self.threshold for ratio in score)
class LongestCommonSubstringFilter(FilterABC):
    """Ratio of the longest common substring to the shorter of the strings.

    If require_all is True, all ratios (for pairs of n segments) have
    to be below the threshold; otherwise at least one must be. For
    bilingual input there is a single ratio, so the flag has no effect.
    """

    def __init__(self, threshold=0.9, require_all=True, **kwargs):
        self.threshold = threshold
        self.require_all = require_all
        super().__init__(**kwargs)

    def score(self, pairs):
        """Yield LCS ratios for all segment combinations of each pair."""
        for pair in pairs:
            ratios = []
            for seq1, seq2 in itertools.combinations(pair, 2):
                matcher = difflib.SequenceMatcher(isjunk=None, a=seq1, b=seq2)
                _, _, size = matcher.find_longest_match(0, len(seq1), 0, len(seq2))
                shorter = min(len(seq1), len(seq2))
                # Empty strings share no substring: score 0.
                ratios.append(size / shorter if shorter else 0)
            yield ratios

    def accept(self, score):
        """Accept according to the require_all policy."""
        check = all if self.require_all else any
        return check(ratio < self.threshold for ratio in score)
class SimilarityFilter(FilterABC):
    """Filter on string/sequence similarity.

    Uses the Levenshtein distance implemented in the RapidFuzz library;
    ``weights`` gives the costs of insertion, deletion and substitution.

    If require_all is True, all similarities (for pairs of n segments)
    have to be below the threshold; otherwise at least one must be. For
    bilingual input there is a single score, so the flag has no effect.
    """

    VALID_UNITS = ('word', 'char', 'character')

    def __init__(self, threshold=0.9, weights=(1, 1, 1), unit='char', lowercase=False,
                 require_all=True, **kwargs):
        if unit not in self.VALID_UNITS:
            raise ConfigurationError(
                f"Value of 'unit' are not one of the allowed choices {self.VALID_UNITS}: {unit}")
        self.threshold = threshold
        self.weights = weights
        self.unit = unit
        self.lowercase = lowercase
        self.require_all = require_all
        super().__init__(**kwargs)

    def similarity(self, seq1, seq2):
        """Return the normalized Levenshtein similarity of two sequences."""
        if self.lowercase:
            seq1, seq2 = seq1.lower(), seq2.lower()
        if self.unit == 'word':
            # Compare word sequences instead of character sequences.
            seq1, seq2 = seq1.split(), seq2.split()
        return rapidfuzz.distance.Levenshtein.normalized_similarity(
            seq1, seq2, weights=self.weights)

    def score(self, pairs):
        """Yield similarities for all segment combinations of each pair."""
        for pair in pairs:
            yield [self.similarity(first, second)
                   for first, second in itertools.combinations(pair, 2)]

    def accept(self, score):
        """Accept according to the require_all policy."""
        check = all if self.require_all else any
        return check(ratio < self.threshold for ratio in score)
class RepetitionFilter(FilterABC):
    """Filter segments with repeated content

    Filter segments with substrings of min_length to max_length
    characters that are repeated at least threshold number of times.
    The first occurrence is not counted to the threshold, i.e.,
    threshold 2 means that the substring has to occur three times.

    There may be optional space character(s) between the repeated
    strings that are not counted to the length. The repeated string
    cannot start with a whitespace character but is not limited
    otherwise.
    """

    def __init__(self, threshold=2, min_length=3, max_length=100, **kwargs):
        if threshold < 1:
            raise ConfigurationError("threshold for RepetitionFilter has to be at least one")
        if min_length < 1:
            raise ConfigurationError("min_length for RepetitionFilter has to be at least one")
        self._threshold = threshold
        self._min_length = min_length
        self._max_length = max_length
        # Compile once; the pattern depends only on the settings above.
        self._regexp = self._get_regexp()
        super().__init__(**kwargs)

    @property
    def min_length(self):
        """Minimum number of characters in pattern"""
        return self._min_length

    @property
    def max_length(self):
        """Maximum number of characters in pattern"""
        return self._max_length

    @property
    def threshold(self):
        """Threshold for the number of repetitions"""
        return self._threshold

    def _get_regexp(self):
        """Return compiled regexp for finding repetitions"""
        # Group 1 lazily captures a candidate of min_length..max_length
        # characters that does not start with whitespace; the backreference
        # then requires at least `threshold` further occurrences, each
        # optionally preceded by spaces.
        rstring = f'(\\S.{{{self.min_length-1},{self.max_length}}}?)(?: *\\1){{{self.threshold},}}'
        return regex.compile(rstring)

    def get_repetitions(self, segment):
        """Return the number of repetitions and the repeated string

        Returns the number of repetitions and the repeated string for
        the first match of at least self.threshold number of
        repetitions. The segment may contain longer repetitions than
        the one returned. If there no matched repetitions, zero and
        None are returned.
        """
        match = self._regexp.search(segment)
        if match:
            full = match.group(0)
            repeated = match.group(1)
            # Occurrences beyond the first count as repetitions.
            return full.count(repeated) - 1, repeated
        return 0, None

    def score(self, pairs):
        """Yield per-segment repetition counts for each pair."""
        for pair in pairs:
            yield [self.get_repetitions(sent)[0] for sent in pair]

    def accept(self, score):
        """Accept when no segment reaches the repetition threshold."""
        return all(repetitions < self.threshold for repetitions in score)
| 36.640429
| 122
| 0.621424
| 2,508
| 20,482
| 4.955343
| 0.158293
| 0.032427
| 0.012391
| 0.01915
| 0.399903
| 0.36466
| 0.326118
| 0.309623
| 0.261667
| 0.230206
| 0
| 0.013227
| 0.280197
| 20,482
| 558
| 123
| 36.706093
| 0.829343
| 0.173811
| 0
| 0.34903
| 0
| 0.00277
| 0.077393
| 0.011488
| 0
| 0
| 0
| 0
| 0
| 1
| 0.149584
| false
| 0.016621
| 0.049862
| 0.022161
| 0.34903
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
55192286c083738515eab7ff51801fcee926be51
| 2,119
|
py
|
Python
|
dkr-py310/docker-student-portal-310/course_files/begin_advanced/py_numpy_1.py
|
pbarton666/virtual_classroom
|
a9d0dc2eb16ebc4d2fd451c3a3e6f96e37c87675
|
[
"MIT"
] | null | null | null |
dkr-py310/docker-student-portal-310/course_files/begin_advanced/py_numpy_1.py
|
pbarton666/virtual_classroom
|
a9d0dc2eb16ebc4d2fd451c3a3e6f96e37c87675
|
[
"MIT"
] | null | null | null |
dkr-py310/docker-student-portal-310/course_files/begin_advanced/py_numpy_1.py
|
pbarton666/virtual_classroom
|
a9d0dc2eb16ebc4d2fd451c3a3e6f96e37c87675
|
[
"MIT"
] | null | null | null |
#py_numpy_1.py
"""A snake-charming application: make the white background of an image
transparent, then reduce the palette of the result."""
from PIL import Image
import numpy as np
import os

idir = os.getcwd()
iname = 'im3.png'  # 'white_snake.PNG'
saveas = 'new_snake.PNG'

# Sets up arrays for pixel processing (RGBA order).
white = np.array([255, 255, 255, 0])  # r, g, b, a
transparent = np.array([0, 0, 0, 0])
background = white

# Open the image and convert it to RGBA.
raw_image = Image.open(iname)
converted_image = raw_image.convert('RGBA')
raw_image.close()
h, w = converted_image.size
converted_histo = converted_image.histogram()
converted_colors = converted_image.getcolors(w * h)

# Dump the data into a numpy array and split the channels ("bands").
data = np.array(converted_image)  # h * w * 4 array (rgba)
r, g, b, a = data.T

# Mask pixels matching the background colour and make them transparent.
# BUG FIX: the original compared b against background[1] (green) and g
# against background[2] (blue); that only worked because white has equal
# r/g/b components.
replace = (r == background[0]) & (g == background[1]) & (b == background[2])
data[replace.T] = (0, 0, 0, 0)

# Generate a new image, grab some stats, and save it.
new_image = Image.fromarray(data, 'RGBA')
h, w = new_image.size
new_histo = new_image.histogram()
new_colors = new_image.getcolors(w * h)  # a list of tuples [count (rgba), ...]
new_image.save(saveas)

recovered_image = Image.open(saveas)
h, w = recovered_image.size
recovered_histo = recovered_image.histogram()
recovered_colors = recovered_image.getcolors(w * h)  # a list of tuples [count (rgba), ...]

# Strategy: make a list of color bins we expect to find. These will have
# pixel ranges that are human-friendly e.g., 'brownish', 'gold'. Each spec
# within the bin can be additively applied to a mask - functionally
# reducing the color palette.
# (The original duplicated this convert() call; one is enough.)
reduced_image = recovered_image.convert('P', palette=Image.ADAPTIVE, colors=10)
reduc2 = reduced_image.convert('RGB')  # back to RGB for BMP output

# Save the image in a couple of formats.
reduc_fn = 'scratch.BMP'
reduc2.save(reduc_fn)
reduced_histo = reduced_image.histogram()
reduced_colors = reduced_image.getcolors(w * h)  # a list of tuples [count (rgba), ...]
reduced_image.save(saveas + 'reduced.BMP')

# Now show them.
recovered_image.show()
reduced_image.show()
recovered_image.close()
| 32.6
| 88
| 0.747994
| 344
| 2,119
| 4.488372
| 0.366279
| 0.072539
| 0.007772
| 0.041451
| 0.154145
| 0.154145
| 0.154145
| 0.154145
| 0.154145
| 0.154145
| 0
| 0.017223
| 0.123171
| 2,119
| 65
| 89
| 32.6
| 0.813778
| 0.351581
| 0
| 0
| 0
| 0
| 0.04068
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
5519d50b257da54e3b0054563d77f75181fccdb3
| 2,958
|
py
|
Python
|
zfg2parser.py
|
valoeghese/ZoesteriaConf
|
10c3239acde2b4568bb48d1d71b3798ecff0afc3
|
[
"MIT"
] | 1
|
2020-09-02T19:24:22.000Z
|
2020-09-02T19:24:22.000Z
|
zfg2parser.py
|
valoeghese/ZoesteriaConf
|
10c3239acde2b4568bb48d1d71b3798ecff0afc3
|
[
"MIT"
] | 7
|
2020-08-08T01:57:29.000Z
|
2021-05-08T08:50:19.000Z
|
zfg2parser.py
|
valoeghese/ZoesteriaConf
|
10c3239acde2b4568bb48d1d71b3798ecff0afc3
|
[
"MIT"
] | 1
|
2021-04-29T14:25:21.000Z
|
2021-04-29T14:25:21.000Z
|
class Container:
    """A parsed compound entry: holds its key/value mapping."""

    def __init__(self):
        # Mapping of entry name -> parsed value (str, list, or Container).
        self.data = {}
# Cursor into the input string, shared by all parse functions; each
# function advances it past the characters it consumes.
index = -1
# compound "container" entries
def parseContainer(container, data, size):
    """Parse key/value entries into ``container.data`` until '}' or EOF.

    ``data`` is the whole input string and ``size`` its length; the
    shared global ``index`` is the cursor. Returns ``container``.
    """
    global index
    # mode 0: accumulating a key name; mode 1: expecting the value.
    mode = 0  # 0 = var names, 1 = var values
    buffer = ""
    while index + 1 < size:
        index += 1
        char = data[index]
        if char == '}':
            break
        elif char == "#":
            parseComment(data, size)
        elif mode == 1:
            if char.isspace():
                pass
            else:
                if char == '{':  # new container
                    container.data[buffer] = parseContainer(Container(), data, size)
                elif char == '[':  # new list
                    container.data[buffer] = parseList(data, size)
                elif char == ';':  # new empty data object
                    container.data[buffer] = ""
                else:  # new data object
                    container.data[buffer] = parseData(data, size)
                # Value finished: reset for the next key.
                buffer = ""
                mode = 0
        elif char == '=':
            mode = 1
        elif not char.isspace():
            buffer += char  # append character to string buffer
    return container
# list entries
def parseList(data, size):
    """Parse a '[' ... ']' list entry; returns the collected values.

    Advances the module-level ``index`` cursor until the matching ']' (or end
    of input) is reached.
    """
    global index
    items = []
    while index + 1 < size:
        index += 1
        ch = data[index]
        if ch == ']':
            # end of this list
            break
        if ch == "#":
            parseComment(data, size)
        elif not ch.isspace():
            # first non-space character decides the element's type
            if ch == '{':  # new container
                items.append(parseContainer(Container(), data, size))
            elif ch == '[':  # new list
                items.append(parseList(data, size))
            elif ch == ';':  # new empty data object
                items.append("")
            else:  # new data object
                items.append(parseData(data, size))
    return items
# data value entries
def parseData(data, size):
    """Parse a scalar data value terminated by ';'.

    The character at the current ``index`` is the value's first character.
    Spaces are kept; tabs, carriage returns and line feeds are treated as
    formatting and dropped. Surrounding whitespace is stripped from the result.
    """
    global index
    chars = [data[index]]  # initial character is already at the index
    while index + 1 < size:
        index += 1
        ch = data[index]
        if ch == ';':
            break
        if ch == ' ' or not ch.isspace():
            # spaces are the only whitespace preserved inside a value
            chars.append(ch)
    return "".join(chars).strip()
# data value entries done
# comments
def parseComment(data, size):
    """Skip the remainder of a '#' comment (up to and including end of line)."""
    global index
    while index + 1 < size:
        index += 1
        if data[index] == '\n':  # comments end at a newline
            break
# Entry point: read the file whose path is supplied on stdin and parse it
# into a root Container.
fileData = ""
with open(input(), 'r') as file:
    fileData = file.read()
fileSize = len(fileData)
if (fileSize == 0):
    print("File is empty!")
else:
    # NOTE(review): fileContent is assigned but not used afterwards in the
    # visible source — presumably consumed by code elsewhere; confirm.
    fileContent = parseContainer(Container(), fileData, fileSize)
| 29.58
| 85
| 0.499324
| 298
| 2,958
| 4.942953
| 0.278523
| 0.065173
| 0.04888
| 0.051595
| 0.363204
| 0.300747
| 0.300747
| 0.300747
| 0.238289
| 0.179905
| 0
| 0.008949
| 0.395538
| 2,958
| 99
| 86
| 29.878788
| 0.814877
| 0.162948
| 0
| 0.525
| 0
| 0
| 0.012739
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0.0125
| 0
| 0
| 0.1125
| 0.0125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
551b09ca0d43e7df528a517a764809a6c7946e75
| 3,017
|
py
|
Python
|
syndicate/connection/secrets_manager_connection.py
|
Dmytro-Skorniakov/aws-syndicate
|
81363334886c53969f1f0a0c0ac0168318204990
|
[
"Apache-2.0"
] | null | null | null |
syndicate/connection/secrets_manager_connection.py
|
Dmytro-Skorniakov/aws-syndicate
|
81363334886c53969f1f0a0c0ac0168318204990
|
[
"Apache-2.0"
] | null | null | null |
syndicate/connection/secrets_manager_connection.py
|
Dmytro-Skorniakov/aws-syndicate
|
81363334886c53969f1f0a0c0ac0168318204990
|
[
"Apache-2.0"
] | null | null | null |
"""
Copyright 2018 EPAM Systems, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from boto3 import client
from syndicate.commons.log_helper import get_logger
from syndicate.connection.helper import apply_methods_decorator, retry
_LOG = get_logger('syndicate.connection.secrets_manager_connection')
@apply_methods_decorator(retry)
class SecretsManagerConnection(object):
    """Thin wrapper around the boto3 Secrets Manager client."""

    def __init__(self, region=None, aws_access_key_id=None,
                 aws_secret_access_key=None, aws_session_token=None):
        self.client = client('secretsmanager', region,
                             aws_access_key_id=aws_access_key_id,
                             aws_secret_access_key=aws_secret_access_key,
                             aws_session_token=aws_session_token)
        _LOG.debug('Opened new Secrets Manager connection.')

    def describe_secret(self, secret_id):
        """Return the service's metadata describing the secret.

        :param secret_id: ARN or friendly name of the secret
        """
        response = self.client.describe_secret(SecretId=secret_id)
        return response

    def get_secret_value(self, secret_id, secret_type='string', version_id=None, version_label=None):
        """Fetch a secret's value.

        :param secret_id: ARN or friendly name of the secret
        :param secret_type: 'string' or 'binary' — selects which response field
            is returned
        :param version_id: optional version id; mutually exclusive with
            version_label
        :param version_label: optional staging label attached to a version
        :raises AssertionError: if both version selectors are given or
            secret_type is invalid
        """
        arguments = {
            'SecretId': secret_id,
        }
        if version_id and version_label:
            raise AssertionError('version_id and version_label cannot be passed both')
        elif version_id:
            arguments['VersionId'] = version_id
        elif version_label:
            # BUGFIX: the original set the label parameter unconditionally
            # (even when version_label was None) and used 'VersionLabel',
            # which is not a valid get_secret_value parameter — the API
            # expects 'VersionStage'.
            arguments['VersionStage'] = version_label
        if secret_type != 'string' and secret_type != 'binary':
            raise AssertionError('wrong type value. only string or binary is allowed')
        response = self.client.get_secret_value(**arguments)
        if response and secret_type == 'string':
            return response.get('SecretString')
        if response and secret_type == 'binary':
            return response.get('SecretBinary')

    def put_secret_value(self, secret_id, secret_value, secret_type='string',
                         labels=None):
        """Store a new value for the secret.

        :param secret_id: ARN or friendly name of the secret
        :param secret_value: the value to store
        :param secret_type: 'string' or 'binary'
        :param labels: optional iterable of staging labels
        :raises AssertionError: if secret_type is invalid

        BUGFIX: the original called set(labels) unconditionally, raising
        TypeError for the default labels=None, and passed a set where the
        API expects a list of strings.
        """
        arguments = {
            'SecretId': secret_id,
        }
        if labels:
            # de-duplicate, but hand the API the list type it expects
            arguments['VersionStages'] = list(set(labels))
        if secret_type == 'string':
            arguments['SecretString'] = secret_value
        elif secret_type == 'binary':
            arguments['SecretBinary'] = secret_value
        else:
            raise AssertionError('wrong secret_type value. only string or binary is allowed')
        self.client.put_secret_value(**arguments)

    def delete_secret(self, secret_id):
        """Delete the secret.

        BUGFIX: the original called delete_parameter — an SSM Parameter Store
        method that does not exist on the Secrets Manager client — instead of
        delete_secret.
        """
        self.client.delete_secret(SecretId=secret_id)
| 39.181818
| 101
| 0.670202
| 362
| 3,017
| 5.364641
| 0.364641
| 0.046344
| 0.041195
| 0.021627
| 0.149846
| 0.066941
| 0.037075
| 0.037075
| 0
| 0
| 0
| 0.003993
| 0.2529
| 3,017
| 76
| 102
| 39.697368
| 0.857587
| 0.183626
| 0
| 0.122449
| 0
| 0
| 0.16646
| 0.019462
| 0
| 0
| 0
| 0
| 0.061224
| 1
| 0.102041
| false
| 0.020408
| 0.061224
| 0
| 0.244898
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
551b46a9abb3f55dfb647a884266cebfd6c4db08
| 3,324
|
py
|
Python
|
Simple GAN/networks.py
|
Srujan35007/My-GANs
|
7953c859169134a0a84ac3cd674f629af9942465
|
[
"MIT"
] | null | null | null |
Simple GAN/networks.py
|
Srujan35007/My-GANs
|
7953c859169134a0a84ac3cd674f629af9942465
|
[
"MIT"
] | null | null | null |
Simple GAN/networks.py
|
Srujan35007/My-GANs
|
7953c859169134a0a84ac3cd674f629af9942465
|
[
"MIT"
] | null | null | null |
import time
b = time.time()  # start of import timing
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchsummary import summary
from math import log2, exp
a = time.time()  # end of import timing
print('Imports complete in %.3f seconds' % (a-b))
class Generator(nn.Module):
    """DCGAN-style generator: latent vector -> (n_channels, R, R) image in [-1, 1].

    The latent vector is reshaped to (latent_dims // 16, 4, 4) and upsampled by
    stride-2 transposed convolutions, halving the channel count at each step
    (never below min_channels), until max_resolution is reached.
    """

    def __init__(self, latent_dims=128, n_channels=3, max_resolution=64, max_channels=256, min_channels=16):
        super(Generator, self).__init__()
        self.n_z = latent_dims
        assert latent_dims > 16
        num_blocks = int(log2(max_resolution)) - 2  # doublings from the 4x4 seed
        stages = [nn.ConvTranspose2d(self.n_z // 16, max_channels, 3, 1, 1)]
        out_ch = -1
        # the upsampling pyramid: resolution x2, channels /2 per block
        for i in range(num_blocks):
            in_ch = max(min_channels, max_channels // (2 ** i))
            out_ch = max(min_channels, in_ch // 2)
            stages.append(self.make_gen_block(in_ch, out_ch, 4, 2, 1))
        stages.append(nn.Conv2d(out_ch, n_channels, 3, 1, 1))
        stages.append(nn.Tanh())
        self.layers = nn.Sequential(*stages)

    def make_gen_block(self, in_channels, out_channels, kernel_size_=3, stride_=1, padding_=1):
        """One upsampling stage: transposed conv + batch norm + LeakyReLU."""
        block = nn.Sequential(
            nn.ConvTranspose2d(in_channels, out_channels, kernel_size_,
                               stride_, padding=padding_, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.LeakyReLU(negative_slope=0.2, inplace=True),
        )
        return block

    def forward(self, latent):
        # reshape the flat latent vector into the (C, 4, 4) seed volume
        seed = latent.view(-1, self.n_z // 16, 4, 4)
        return self.layers(seed)
class Discriminator(nn.Module):
    """DCGAN-style discriminator: image -> scalar probability in [0, 1].

    Stride-2 conv blocks halve the resolution while doubling channels (from
    min_channels up to max_channels); the final feature map is average-pooled,
    mean-reduced over channels and squashed through a sigmoid.
    """

    def __init__(self, max_resolution=64, n_channels=3, max_channels=256, min_channels=8):
        super(Discriminator, self).__init__()
        self.max_resolution = max_resolution
        self.n_channels = n_channels
        self.max_channels = max_channels
        num_blocks = int(log2(max_resolution)) - 1  # halvings down to 2x2
        stages = [nn.Conv2d(n_channels, max(min_channels, max_channels // (2 ** num_blocks)), 3, 1, 1)]
        for i in range(num_blocks):
            in_ch = max_channels // (2 ** (num_blocks - i))
            if in_ch < min_channels:
                # clamp the narrow early stages to min_channels
                in_ch = min_channels
                out_ch = min_channels
            else:
                out_ch = in_ch * 2
            stages.append(self.make_disc_block(in_ch, out_ch, 4, 2, 1))
        stages.append(nn.AvgPool2d(2))
        self.layers = nn.Sequential(*stages)

    def make_disc_block(self, in_channels, out_channels, kernel_size_=3, stride_=1, padding_=1):
        """One downsampling stage: conv + batch norm + LeakyReLU."""
        block = nn.Sequential(
            nn.Conv2d(in_channels, out_channels, kernel_size_, stride_, padding_, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.LeakyReLU(0.2, inplace=True),
        )
        return block

    def forward(self, image):
        # normalise the input shape regardless of how it was flattened
        image = image.view(-1, self.n_channels, self.max_resolution, self.max_resolution)
        features = self.layers(image).view(-1, self.max_channels)
        return torch.sigmoid(features.mean(dim=1, keepdim=True))
# Smoke test: instantiate each network at 512x512 and print a torchsummary report.
print('\nGenerator Summary\n')
gen = Generator(max_resolution=512,max_channels=512,min_channels=8)
# NOTE(review): 'a' is assigned here and again below but never used —
# presumably leftover from manual testing; confirm before removing.
a = torch.empty(128).normal_(0,0.35).view(-1,128)
(summary(gen, (1,128)))
print('\nDiscriminator Summary\n')
disc = Discriminator(max_channels=512,max_resolution=512)
a = torch.empty(10,3,512,512).normal_(0,0.35)
(summary(disc, (1,3,512,512)))
| 39.571429
| 105
| 0.663357
| 467
| 3,324
| 4.430407
| 0.211991
| 0.058482
| 0.055099
| 0.050749
| 0.381344
| 0.304012
| 0.275979
| 0.161914
| 0.122765
| 0.073465
| 0
| 0.047655
| 0.21089
| 3,324
| 84
| 106
| 39.571429
| 0.741136
| 0.006619
| 0
| 0.138889
| 0
| 0
| 0.023629
| 0
| 0
| 0
| 0
| 0
| 0.013889
| 1
| 0.083333
| false
| 0
| 0.097222
| 0.027778
| 0.263889
| 0.041667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
551c341bfebf085d7288ee08b3180f837e7122dd
| 2,607
|
py
|
Python
|
ui/Rhino/RV2/dev/__temp/RV2init_console_cmd.py
|
tkmmark/compas-RV2
|
bc18f4dada9c1e31a0f7df4ef981934c6d2b05b3
|
[
"MIT"
] | null | null | null |
ui/Rhino/RV2/dev/__temp/RV2init_console_cmd.py
|
tkmmark/compas-RV2
|
bc18f4dada9c1e31a0f7df4ef981934c6d2b05b3
|
[
"MIT"
] | null | null | null |
ui/Rhino/RV2/dev/__temp/RV2init_console_cmd.py
|
tkmmark/compas-RV2
|
bc18f4dada9c1e31a0f7df4ef981934c6d2b05b3
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division

import scriptcontext as sc

try:
    # probe that every dependency of the plugin is importable before use
    import compas  # noqa: F401
    import compas_rhino  # noqa: F401
    import compas_ags  # noqa: F401
    import compas_tna  # noqa: F401
    import compas_cloud  # noqa: F401
except ImportError:
    # do something here to fix the problem
    raise
else:
    # only pull in the names we use once the probes succeeded
    from compas_cloud import Proxy
    from compas_rv2.rhino import BrowserForm
    from compas_rv2.scene import Scene


# Rhino command name under which this script is registered
__commandname__ = "RV2init"
def RunCommand(is_interactive):
    """Initialise RV2: show the browser splash screen, start the compas_cloud
    proxy, and store a fresh Scene with default settings in Rhino's sticky
    dictionary.

    NOTE(review): is_interactive is accepted but unused in this body — confirm
    whether the Rhino plugin framework requires the parameter.
    """
    browser = BrowserForm()
    browser.Show()

    # background=False: run the cloud proxy in the foreground
    sc.sticky["RV2.proxy"] = Proxy(background=False)

    # default layer names, visibility flags and RGB colours for the
    # form/force/thrust diagrams
    settings = {
        "layers.skeleton": "RV2::Skeleton",
        "layers.form": "RV2::FormDiagram",
        "layers.force": "RV2::ForceDiagram",
        "layers.thrust": "RV2::ThrustNetwork",

        "show.form.vertices": True,
        "show.form.edges": True,
        "show.form.faces": False,
        "show.force.vertices": True,
        "show.force.edges": True,
        "show.force.faces": False,
        "show.thrust.vertices": True,
        "show.thrust.edges": True,
        "show.thrust.faces": True,

        "color.form.vertices": (0, 255, 0),
        "color.form.vertices:is_fixed": (0, 255, 255),
        "color.form.vertices:is_external": (0, 0, 255),
        "color.form.vertices:is_anchor": (255, 255, 255),
        "color.form.edges": (0, 255, 0),
        "color.form.edges:is_external": (0, 0, 255),
        "color.thrust.vertices": (255, 0, 255),
        "color.thrust.vertices:is_fixed": (0, 255, 0),
        "color.thrust.vertices:is_anchor": (255, 0, 0),
        "color.thrust.edges": (255, 0, 255),
        "color.thrust.faces": (255, 0, 255),
        "color.force.vertices": (0, 255, 0),
        "color.force.vertices:is_fixed": (0, 255, 255),
        "color.force.edges": (0, 255, 0),
        "color.force.edges:is_external": (0, 0, 255),

        "scale.thrust.external": 0.25,

        "vertical.zmax": 4.0,
        "vertical.kmax": 100,
        "horizontal.kmax": 100,
        "horizontal.alpha": 100
    }

    scene = Scene(settings)
    scene.clear()

    # session state kept in Rhino's sticky dictionary for later commands
    sc.sticky["RV2"] = {
        "session": {
            "cwd": None,
            "ext": 'rv2',
            "current": None
        },
        "scene": scene
        # "data": DATA
    }
# ==============================================================================
# Main
# ==============================================================================

if __name__ == '__main__':
    # interactive invocation when the script is run directly
    RunCommand(True)
| 24.59434
| 80
| 0.542769
| 290
| 2,607
| 4.734483
| 0.282759
| 0.037873
| 0.018208
| 0.036417
| 0.217043
| 0.086672
| 0.03933
| 0
| 0
| 0
| 0
| 0.066082
| 0.257
| 2,607
| 105
| 81
| 24.828571
| 0.642747
| 0.102417
| 0
| 0
| 0
| 0
| 0.328467
| 0.118935
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014706
| false
| 0
| 0.191176
| 0
| 0.205882
| 0.014706
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
551c6b36cc572d6424d1e8e35b4d86a47b2d8f93
| 3,783
|
py
|
Python
|
utils/count_supported.py
|
delcypher/smt-coral-wrapper
|
e7024efd83ba2da87fb178917f49a6b430b9c01c
|
[
"MIT"
] | 1
|
2018-05-04T03:51:58.000Z
|
2018-05-04T03:51:58.000Z
|
utils/count_supported.py
|
delcypher/smt2coral
|
e7024efd83ba2da87fb178917f49a6b430b9c01c
|
[
"MIT"
] | null | null | null |
utils/count_supported.py
|
delcypher/smt2coral
|
e7024efd83ba2da87fb178917f49a6b430b9c01c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# vim: set sw=4 ts=4 softtabstop=4 expandtab:
"""
Read invocation info file for smt-runner
and report which benchmarks are supported
by the CoralPrinter.
"""
# HACK: put smt2coral in search path
import os
import sys
_repo_root = os.path.dirname(os.path.dirname(__file__))
sys.path.insert(0, _repo_root)

from smt2coral import Converter
from smt2coral import DriverUtil
from smt2coral import Util

import argparse
import logging
import yaml

# module-level logger
_logger = logging.getLogger(__name__)
def loadYaml(openFile):
    """Load YAML from *openFile*, preferring the C-accelerated loader."""
    loader_cls = getattr(yaml, 'CLoader', None)  # libyaml binding, if built
    if loader_cls is None:
        _logger.warning('Using slow Python YAML loader')
        loader_cls = yaml.Loader
    return yaml.load(openFile, Loader=loader_cls)
def benchmark_can_be_converted(full_path):
    """Return (converted_ok, translation_was_sound) for one benchmark file.

    Exits the process with status 1 if the SMT file cannot even be parsed.
    ``translation_was_sound`` stays None when conversion fails before the
    soundness check runs.
    """
    assert os.path.exists(full_path)
    sound = None
    # parse using Z3
    with open(full_path, 'r') as handle:
        _logger.debug('Opened "{}"'.format(handle.name))
        constraint, err = Util.parse(handle)
    if err is not None:
        # parser failure is fatal
        _logger.error('Parser failure ({}): {}'.format(full_path, err))
        sys.exit(1)
    constraints = Util.split_bool_and(constraint)
    # attempt the conversion
    printer = Converter.CoralPrinter()
    try:
        _ = printer.print_constraints(constraints)
        sound = printer.translation_was_sound()
    except Converter.CoralPrinterException as e:
        _logger.debug('{}: {}: {}'.format(full_path, type(e).__name__, e))
        return (False, sound)
    return (True, sound)
def main(args):
    """Parse CLI args, load the invocation-info YAML, try to convert every
    listed benchmark and print summary counts. Returns 1 on a missing
    benchmark file, otherwise None.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("invocation_info_file",
                        nargs='?',
                        type=argparse.FileType('r'),
                        default=sys.stdin,
                        )
    parser.add_argument('--benchmark-base',
                        type=str,
                        dest='benchmark_base',
                        default="")
    DriverUtil.parserAddLoggerArg(parser)
    pargs = parser.parse_args(args)
    DriverUtil.handleLoggerArgs(pargs, parser)
    # Load invocation info
    ii = loadYaml(pargs.invocation_info_file)
    assert isinstance(ii, dict)
    assert 'results' in ii
    results = ii['results']
    # partition benchmarks into convertible / soundly-convertible / failing
    benchmarks_can_be_converted = set()
    benchmarks_with_sound_translation = set()
    benchmarks_cannot_be_converted = set()
    # Iterate over benchmarks
    for index, benchmark_info in enumerate(results):
        benchmark = benchmark_info['benchmark']
        full_path = os.path.join(pargs.benchmark_base, benchmark)
        if not os.path.exists(full_path):
            _logger.error('Could not find benchmark "{}"'.format(full_path))
            return 1
        can_convert, sound_translation = benchmark_can_be_converted(full_path)
        progress_str = '{} of {}'.format(index + 1, len(results))
        if can_convert:
            _logger.info('{}: Conversion successful ({})'.format(
                benchmark,
                progress_str))
            benchmarks_can_be_converted.add(benchmark)
            if sound_translation:
                benchmarks_with_sound_translation.add(benchmark)
        else:
            _logger.warning('{}: Conversion failed ({})'.format(
                benchmark,
                progress_str))
            benchmarks_cannot_be_converted.add(benchmark)
    # Report
    print("# of benchmarks can be converted: {}".format(
        len(benchmarks_can_be_converted)))
    print("# of benchmarks that could be converted soundly: {}".format(
        len(benchmarks_with_sound_translation)))
    print("# of benchmarks cannot be converted: {}".format(
        len(benchmarks_cannot_be_converted)))

if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
| 32.612069
| 78
| 0.662437
| 441
| 3,783
| 5.426304
| 0.342404
| 0.050564
| 0.035102
| 0.040117
| 0.097785
| 0.025909
| 0
| 0
| 0
| 0
| 0
| 0.004478
| 0.23262
| 3,783
| 115
| 79
| 32.895652
| 0.819842
| 0.088025
| 0
| 0.070588
| 0
| 0
| 0.11176
| 0
| 0
| 0
| 0
| 0
| 0.035294
| 1
| 0.035294
| false
| 0
| 0.094118
| 0
| 0.176471
| 0.070588
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
551f2a4107231a13573c3c3b43e182531e15c00c
| 18,810
|
py
|
Python
|
atoman/filtering/filterer.py
|
chrisdjscott/Atoman
|
e87ac31bbdcf53bb8f3efdfb109787d604890394
|
[
"MIT"
] | 9
|
2015-11-23T12:13:34.000Z
|
2021-11-18T05:23:35.000Z
|
atoman/filtering/filterer.py
|
chrisdjscott/Atoman
|
e87ac31bbdcf53bb8f3efdfb109787d604890394
|
[
"MIT"
] | 1
|
2017-07-17T20:27:50.000Z
|
2017-07-23T05:27:15.000Z
|
atoman/filtering/filterer.py
|
chrisdjscott/Atoman
|
e87ac31bbdcf53bb8f3efdfb109787d604890394
|
[
"MIT"
] | 4
|
2015-11-23T12:13:37.000Z
|
2017-05-03T08:24:19.000Z
|
"""
The filterer object.
@author: Chris Scott
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import copy
import time
import logging
import numpy as np
import six
from six.moves import zip
from .filters import _filtering as filtering_c
from ..system.atoms import elements
from . import voronoi
from .filters import base
from . import filters
from . import atomStructure
from ..rendering import _rendering
class Filterer(object):
    """
    Filterer class.

    Applies the selected filters in order, maintaining the lists of visible
    atoms / defects and their per-filter scalars and vectors.
    """
    # known atom structure types
    knownStructures = atomStructure.knownStructures

    # all available filters
    defaultFilters = [
        "Species",
        "Point defects",
        "Crop box",
        "Cluster",
        "Displacement",
        "Charge",
        "Crop sphere",
        "Slice",
        "Coordination number",
        "Voronoi neighbours",
        "Voronoi volume",
        "Bond order",
        "Atom ID",
        "ACNA",
        "Slip",
        "Bubbles",
    ]
    defaultFilters.sort()

    # filters that are compatible with the 'Point defects' filter
    defectCompatibleFilters = [
        "Crop box",
        "Slice",
    ]

    def __init__(self, voronoiOptions):
        self.logger = logging.getLogger(__name__)
        self.voronoiOptions = voronoiOptions
        self._driftCompensation = False
        self.reset()

    def toggleDriftCompensation(self, driftCompensation):
        """Toggle the drift setting."""
        self._driftCompensation = driftCompensation

    def reset(self):
        """
        Reset to initial state.
        """
        self.inputState = None
        self.refState = None
        self.currentFilters = []
        self.currentSettings = []
        self.visibleAtoms = np.asarray([], dtype=np.int32)
        self.interstitials = np.asarray([], dtype=np.int32)
        self.vacancies = np.asarray([], dtype=np.int32)
        self.antisites = np.asarray([], dtype=np.int32)
        self.onAntisites = np.asarray([], dtype=np.int32)
        self.splitInterstitials = np.asarray([], dtype=np.int32)
        self.visibleSpecieCount = np.asarray([], dtype=np.int32)
        self.vacancySpecieCount = np.asarray([], dtype=np.int32)
        self.interstitialSpecieCount = np.asarray([], dtype=np.int32)
        self.antisiteSpecieCount = np.asarray([], dtype=np.int32)
        self.splitIntSpecieCount = np.asarray([], dtype=np.int32)
        self.driftVector = np.zeros(3, np.float64)
        self.clusterList = []
        self.bubbleList = []
        self.structureCounterDicts = {}
        self.voronoiAtoms = voronoi.VoronoiAtomsCalculator(self.voronoiOptions)
        self.voronoiDefects = voronoi.VoronoiDefectsCalculator(self.voronoiOptions)
        self.scalarsDict = {}
        self.latticeScalarsDict = {}
        self.vectorsDict = {}
        self.defectFilterSelected = False
        self.bubblesFilterSelected = False
        self.spaghettiAtoms = np.asarray([], dtype=np.int32)
        # BUGFIX: initialise the POV-Ray flag so it always exists before
        # povrayAtomsWrittenSlot (which sets it) is ever called
        self.povrayAtomsWritten = False

    def runFilters(self, currentFilters, currentSettings, inputState, refState, sequencer=False):
        """
        Run the filters.
        """
        # time
        runFiltersTime = time.time()

        # reset the filterer
        self.reset()

        # validate the list of filters
        defectFilterSelected = False
        bubblesFilterSelected = False
        for filterName in currentFilters:
            if filterName not in self.defaultFilters and not filterName.startswith("Scalar:"):
                # TODO: check the scalar exists too
                raise ValueError("Unrecognised filter passed to Filterer: '%s'" % filterName)

            # check if the defect filter in the list
            if filterName == "Point defects":
                defectFilterSelected = True
            elif filterName == "Bubbles":
                bubblesFilterSelected = True
        self.logger.debug("Defect filter selected: %s", defectFilterSelected)
        self.defectFilterSelected = defectFilterSelected
        self.bubblesFilterSelected = bubblesFilterSelected

        # store refs to inputs
        self.inputState = inputState
        self.refState = refState
        self.currentFilters = currentFilters
        self.currentSettings = currentSettings

        # set up visible atoms or defect arrays
        if not defectFilterSelected:
            self.logger.debug("Setting all atoms visible initially")
            self.visibleAtoms = np.arange(inputState.NAtoms, dtype=np.int32)
            self.logger.info("%d visible atoms", len(self.visibleAtoms))

            # set Lattice scalars
            self.logger.debug("Adding initial scalars from inputState")
            for scalarsName, scalars in six.iteritems(inputState.scalarsDict):
                self.logger.debug("  Adding '%s' scalars", scalarsName)
                self.latticeScalarsDict[scalarsName] = copy.deepcopy(scalars)

            # set initial vectors
            self.logger.debug("Adding initial vectors from inputState")
            for vectorsName, vectors in six.iteritems(inputState.vectorsDict):
                self.logger.debug("  Adding '%s' vectors", vectorsName)
                self.vectorsDict[vectorsName] = vectors
        else:
            # initialise defect arrays
            self.interstitials = np.empty(inputState.NAtoms, dtype=np.int32)
            self.vacancies = np.empty(refState.NAtoms, dtype=np.int32)
            self.antisites = np.empty(refState.NAtoms, dtype=np.int32)
            self.onAntisites = np.empty(refState.NAtoms, dtype=np.int32)
            self.splitInterstitials = np.empty(3 * refState.NAtoms, dtype=np.int32)

        # drift compensation
        if self._driftCompensation:
            filtering_c.calculate_drift_vector(inputState.NAtoms, inputState.pos, refState.pos,
                                               refState.cellDims, inputState.PBC, self.driftVector)
            self.logger.info("Calculated drift vector: (%f, %f, %f)" % tuple(self.driftVector))

        # run filters
        applyFiltersTime = time.time()
        for filterName, filterSettings in zip(currentFilters, currentSettings):
            # determine the name of filter module to be loaded
            if filterName.startswith("Scalar: "):
                moduleName = "genericScalarFilter"
                filterObjectName = "GenericScalarFilter"
            else:
                words = str(filterName).title().split()
                filterObjectName = "%sFilter" % "".join(words)
                moduleName = filterObjectName[:1].lower() + filterObjectName[1:]
            self.logger.debug("Loading filter module: '%s'", moduleName)
            self.logger.debug("Creating filter object: '%s'", filterObjectName)

            # get module
            filterModule = getattr(filters, moduleName)

            # load dialog
            filterObject = getattr(filterModule, filterObjectName, None)
            if filterObject is None:
                self.logger.error("Could not locate filter object for: '%s'", filterName)
            else:
                self.logger.info("Running filter: '%s'", filterName)

                # filter
                filterObject = filterObject(filterName)

                # construct filter input object
                filterInput = base.FilterInput()
                filterInput.visibleAtoms = self.visibleAtoms
                filterInput.inputState = inputState
                filterInput.refState = refState
                filterInput.voronoiOptions = self.voronoiOptions
                filterInput.bondDict = elements.bondDict
                filterInput.NScalars, filterInput.fullScalars = self.makeFullScalarsArray()
                filterInput.NVectors, filterInput.fullVectors = self.makeFullVectorsArray()
                filterInput.voronoiAtoms = self.voronoiAtoms
                filterInput.voronoiDefects = self.voronoiDefects
                filterInput.driftCompensation = self._driftCompensation
                filterInput.driftVector = self.driftVector
                filterInput.vacancies = self.vacancies
                filterInput.interstitials = self.interstitials
                filterInput.splitInterstitials = self.splitInterstitials
                filterInput.antisites = self.antisites
                filterInput.onAntisites = self.onAntisites
                filterInput.defectFilterSelected = defectFilterSelected

                # run the filter
                result = filterObject.apply(filterInput, filterSettings)

                # cluster list
                if result.hasClusterList():
                    self.clusterList = result.getClusterList()

                # bubble list
                if result.hasBubbleList():
                    self.bubbleList = result.getBubbleList()

                # structure counters
                if result.hasStructureCounterDict():
                    self.structureCounterDicts[result.getStructureCounterName()] = result.getStructureCounterDict()

                # spaghetti analysis
                if result.hasSpaghettiAtoms():
                    self.spaghettiAtoms = result.getSpaghettiAtoms()

                # full vectors/scalars
                self.storeFullScalarsArray(len(self.visibleAtoms), filterInput.NScalars, filterInput.fullScalars)
                self.storeFullVectorsArray(len(self.visibleAtoms), filterInput.NVectors, filterInput.fullVectors)

                # new scalars
                self.scalarsDict.update(result.getScalars())

                # report the counts after each filter runs
                if defectFilterSelected:
                    nint = len(self.interstitials)
                    nvac = len(self.vacancies)
                    nant = len(self.antisites)
                    # BUGFIX: was "/ 3", which produces a float under Python 3
                    # (this file is six/py3 compatible); the count must be an int
                    nsplit = len(self.splitInterstitials) // 3
                    num = nint + nvac + nant + nsplit
                    self.logger.info("%d visible defects", num)
                else:
                    self.logger.info("%d visible atoms", len(self.visibleAtoms))

        # species counts here
        if len(self.visibleAtoms):
            self.visibleSpecieCount = _rendering.countVisibleBySpecie(self.visibleAtoms, len(inputState.specieList),
                                                                      inputState.specie)
        if len(self.interstitials) + len(self.vacancies) + len(self.antisites) + len(self.splitInterstitials) > 0:
            self.vacancySpecieCount = _rendering.countVisibleBySpecie(self.vacancies, len(refState.specieList),
                                                                      refState.specie)
            self.interstitialSpecieCount = _rendering.countVisibleBySpecie(self.interstitials,
                                                                           len(inputState.specieList),
                                                                           inputState.specie)
            self.antisiteSpecieCount = _rendering.countAntisitesBySpecie(self.antisites, len(refState.specieList),
                                                                         refState.specie, self.onAntisites,
                                                                         len(inputState.specieList), inputState.specie)
            self.splitIntSpecieCount = _rendering.countSplitIntsBySpecie(self.splitInterstitials,
                                                                         len(inputState.specieList), inputState.specie)

        # TODO: dictionary of calculated properties... ?? sum of voro vols etc...

        # time to apply filters
        applyFiltersTime = time.time() - applyFiltersTime
        self.logger.debug("Apply filter(s) time: %f s", applyFiltersTime)

        # refresh available scalars in extra options dialog
        # self.parent.colouringOptions.refreshScalarColourOption()

        # time
        runFiltersTime = time.time() - runFiltersTime
        self.logger.debug("Apply list total time: %f s", runFiltersTime)

    def getBubblesIndices(self):
        """Return arrays for bubble vacancy and atom indices."""
        bubbleVacs = []
        bubbleAtoms = []
        for bubble in self.bubbleList:
            for index in bubble.vacancies():
                bubbleVacs.append(index)
            for index in bubble.atoms():
                bubbleAtoms.append(index)
        bubbleVacs = np.asarray(bubbleVacs, dtype=np.int32)
        bubbleAtoms = np.asarray(bubbleAtoms, dtype=np.int32)
        return bubbleVacs, bubbleAtoms

    def povrayAtomsWrittenSlot(self, status, povtime, uniqueID):
        """
        POV-Ray atoms have been written.

        NOTE(review): a falsy status appears to indicate success here —
        confirm against the POV-Ray writer's calling convention.
        """
        if not status:
            self.povrayAtomsWritten = True
            self.logger.debug("Povray atoms written in %f s (%s)", povtime, uniqueID)

    def makeFullScalarsArray(self):
        """
        Combine scalars array into one big array for passing to C
        """
        self.logger.debug("Making full scalars array (N=%d)", len(self.scalarsDict) + len(self.latticeScalarsDict))
        scalarsList = []
        for name, scalars in six.iteritems(self.scalarsDict):
            self.logger.debug("  Adding '%s' scalars", name)
            scalarsList.append(scalars)
            if len(scalars) != len(self.visibleAtoms):
                raise RuntimeError("Wrong length for scalars: '{0}'".format(name))
        for name, scalars in six.iteritems(self.latticeScalarsDict):
            self.logger.debug("  Adding '%s' scalars (Lattice)", name)
            scalarsList.append(scalars)
            if len(scalars) != len(self.visibleAtoms):
                raise RuntimeError("Wrong length for scalars: '{0}' (Lattice)".format(name))
        if len(scalarsList):
            scalarsFull = np.concatenate(scalarsList)
        else:
            scalarsFull = np.array([], dtype=np.float64)
        return len(scalarsList), scalarsFull

    def makeFullVectorsArray(self):
        """
        Combine vectors array into one big array for passing to C
        """
        self.logger.debug("Making full vectors array (N=%d)", len(self.vectorsDict))
        vectorsList = []
        for name, vectors in six.iteritems(self.vectorsDict):
            self.logger.debug("Adding '%s' vectors", name)
            vectorsList.append(vectors)
            if vectors.shape != (len(self.visibleAtoms), 3):
                raise RuntimeError("Shape wrong for vectors array '%s': %r != %r" % (name, vectors.shape,
                                                                                     (len(self.visibleAtoms), 3)))
        if len(vectorsList):
            vectorsFull = np.concatenate(vectorsList)
        else:
            vectorsFull = np.array([], dtype=np.float64)
        return len(vectorsList), vectorsFull

    def storeFullScalarsArray(self, NVisible, NScalars, scalarsFull):
        """
        Split and resize full scalars array; store in dict

        Assumes scalarsDict was not modified since we called
        makeFullScalarsArray.
        """
        if NScalars > 0:
            self.logger.debug("Storing full scalars array")
            scalarsList = np.split(scalarsFull, NScalars)

            # Filterer.scalarsDict
            keys = list(self.scalarsDict.keys())
            lenError = False
            for i, key in enumerate(keys):
                self.logger.debug("Storing '%s' scalars", key)
                scalars = scalarsList[i]
                if len(scalars) < NVisible:
                    msg = "'%s' scalars smaller than expected (%d < %d) (this is expected in some situations); "
                    msg += "clearing scalars list"
                    self.logger.warning(msg, key, len(scalars), NVisible)
                    lenError = True
                    break
                else:
                    scalars_cp = copy.copy(scalars)
                    scalars_cp.resize(NVisible, refcheck=False)
                    self.scalarsDict[key] = scalars_cp
            if lenError:
                self.scalarsDict.clear()

            # Lattice.scalarsDict
            offset = len(keys)
            keys = list(self.latticeScalarsDict.keys())
            lenError = False
            for j, key in enumerate(keys):
                self.logger.debug("  Storing '%s' scalars (Lattice)", key)
                i = j + offset
                scalars = scalarsList[i]
                if len(scalars) < NVisible:
                    msg = "'%s' scalars smaller than expected (%d < %d) (this is expected in some situations); "
                    msg += "clearing scalars list"
                    self.logger.warning(msg, key, len(scalars), NVisible)
                    lenError = True
                    break
                else:
                    scalars_cp = copy.copy(scalars)
                    scalars_cp.resize(NVisible, refcheck=False)
                    self.latticeScalarsDict[key] = scalars_cp
            if lenError:
                self.latticeScalarsDict.clear()

    def storeFullVectorsArray(self, NVisible, NVectors, vectorsFull):
        """
        Split and resize full vectors array; store in dict

        Assumes vectorsDict was not modified since we called
        makeFullVectorsArray.
        """
        if NVectors > 0:
            self.logger.debug("Storing full vectors array in dict")
            vectorsList = np.split(vectorsFull, NVectors)
            keys = list(self.vectorsDict.keys())
            lenError = False
            for key, vectors in zip(keys, vectorsList):
                self.logger.debug("  Storing '%s' vectors", key)
                if len(vectors) < NVisible:
                    msg = "'%s' vectors smaller than expected (%d < %d) (this is expected in some situations); "
                    msg += "clearing vectors list"
                    self.logger.warning(msg, key, len(vectors), NVisible)
                    lenError = True
                    break
                else:
                    vectors_cp = copy.copy(vectors)
                    vectors_cp.resize((NVisible, 3), refcheck=False)
                    self.vectorsDict[key] = vectors_cp
            if lenError:
                self.vectorsDict.clear()
| 42.080537
| 119
| 0.563477
| 1,617
| 18,810
| 6.528757
| 0.209648
| 0.029364
| 0.029838
| 0.024249
| 0.26068
| 0.220991
| 0.140665
| 0.110922
| 0.100407
| 0.091693
| 0
| 0.004833
| 0.350984
| 18,810
| 446
| 120
| 42.174888
| 0.859928
| 0.079532
| 0
| 0.171141
| 0
| 0.010067
| 0.085908
| 0
| 0
| 0
| 0
| 0.002242
| 0
| 1
| 0.033557
| false
| 0.003356
| 0.050336
| 0
| 0.107383
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
551fb4d66e7bdb8b9da3ce7860f15d493f500f54
| 475
|
py
|
Python
|
Entree_in/rou.py
|
dipkhandait/Entree_in
|
5cc03555b1e8022262dffa0a0a459637fa9f49c7
|
[
"MIT"
] | null | null | null |
Entree_in/rou.py
|
dipkhandait/Entree_in
|
5cc03555b1e8022262dffa0a0a459637fa9f49c7
|
[
"MIT"
] | null | null | null |
Entree_in/rou.py
|
dipkhandait/Entree_in
|
5cc03555b1e8022262dffa0a0a459637fa9f49c7
|
[
"MIT"
] | null | null | null |
import sqlite3

# Look up the currently logged-in user and that user's preference list.
# NOTE(review): these queries run at import time; wrap in a main() guard
# if this module is ever imported rather than executed.
connection = sqlite3.connect("Project.db")
cursor = connection.cursor()

# Parameterized query instead of f-string interpolation: the original built
# SQL with f-strings (SQL-injection prone) and compared the numeric `No`
# column against the string '1'.
S_user = cursor.execute("SELECT * FROM Current_login WHERE No = ?", (1,))
record = S_user.fetchone()
user = record[0]
print(user)
try:
    # Any failure here (missing row -> recordpl is None, missing table, ...)
    # is reported instead of crashing the script.
    Spl_user = cursor.execute(
        "SELECT * FROM Col_Pref_list WHERE Username = ?", (user,))
    recordpl = Spl_user.fetchone()
    userpl = recordpl[0]
    print(userpl)
    print("SUCCESSFULL")
except Exception as e:
    print("Unsuccessful")
    print(e)
| 26.388889
| 87
| 0.696842
| 64
| 475
| 5.0625
| 0.5625
| 0.030864
| 0.104938
| 0.111111
| 0.17284
| 0.17284
| 0
| 0
| 0
| 0
| 0
| 0.012626
| 0.166316
| 475
| 18
| 88
| 26.388889
| 0.805556
| 0
| 0
| 0
| 0
| 0
| 0.273109
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.0625
| 0
| 0.0625
| 0.3125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
5520df1b613fa85e6ad8dd2ba56eeb9d62e2e7df
| 2,766
|
py
|
Python
|
tests/cases/yield_test.py
|
MiguelMarcelino/py2many
|
9b040b2a157e265df9c053eaf3e5cd644d3e30d0
|
[
"MIT"
] | 2
|
2022-02-02T11:37:53.000Z
|
2022-03-30T18:19:06.000Z
|
tests/cases/yield_test.py
|
MiguelMarcelino/py2many
|
9b040b2a157e265df9c053eaf3e5cd644d3e30d0
|
[
"MIT"
] | 25
|
2022-02-28T21:19:11.000Z
|
2022-03-23T21:26:20.000Z
|
tests/cases/yield_test.py
|
MiguelMarcelino/py2many
|
9b040b2a157e265df9c053eaf3e5cd644d3e30d0
|
[
"MIT"
] | null | null | null |
@resumable  # For PyJL: marks the generator for py2many/Julia translation
def generator_func():
    """Fixture: three yields with the variable reassigned between them."""
    num = 1
    yield num
    num = 5
    yield num
    num = 10
    yield num
@resumable
def generator_func_loop():
    """Fixture: yield inside a for loop (yields 0, 1, 2)."""
    num = 0
    for n in range(0, 3):
        yield num + n
@resumable
def generator_func_loop_using_var():
    """Fixture: loop bound taken from a reassigned variable."""
    num = 0
    end = 2
    end = 3  # should get last variable assignment
    for n in range(0, end):
        yield num + n
@resumable
def generator_func_nested_loop():
    """Fixture: yield a tuple from inside a nested loop."""
    for n in range(0, 2):
        for i in range(0, 2):
            yield (n,i)
@resumable
def file_reader(file_name:str):
    """Fixture: yield each line of the file at `file_name`."""
    for file_row in open(file_name, "r"):
        yield file_row
@resumable
def testgen():
    """Fixture: side effects (print) interleaved with yields."""
    print("first")
    yield 1
    print("second")
    yield 2
@resumable
def fib():
    """Fixture: infinite Fibonacci generator (tests yield in while True)."""
    a = 0
    b = 1
    while True:
        yield a
        a, b = b, a+b
class TestClass:
    """Fixture: a resumable generator defined as an instance method."""

    @resumable
    def generator_func(self):
        """Yields 123, 5, 10 -- like generator_func() but class-scoped."""
        num = 123
        yield num
        num = 5
        yield num
        num = 10
        yield num
if __name__ == "__main__":
    # Smoke-tests for the fixtures above; each section drains one generator
    # and asserts the expected sequence.
    # Calling functions normally (Supported)
    arr1 = []
    for i in generator_func():
        arr1.append(i)
    assert arr1 == [1, 5, 10]
    # -----------------------
    arr2 = []
    for i in generator_func_loop():
        arr2.append(i)
    assert arr2 == [0, 1, 2]
    # -----------------------
    arr3 = []
    for i in generator_func_loop_using_var():
        arr3.append(i)
    assert arr3 == [0, 1, 2]
    # -----------------------
    # Testing with class scope
    arr4 = []
    testClass1: TestClass = TestClass()
    for i in testClass1.generator_func():
        arr4.append(i)
    assert arr4 == [123, 5, 10]
    # -----------------------
    # Testing nested loop
    arr5 = []
    for i in generator_func_nested_loop():
        arr5.append(i)
    assert arr5 == [(0,0), (0,1), (1,0), (1,1)]
    # -----------------------
    arr6 = []
    # Create file before executing
    # NOTE(review): hard-coded absolute Windows path -- this case only
    # passes on the original author's machine.
    for res in file_reader("C:/Users/Miguel Marcelino/Desktop/test.txt"):
        arr6.append(res)
    assert arr6 == ['test\n', 'test\n', 'test']
    # -----------------------
    arr7 = []
    res = fib()
    for i in range(0,6):
        arr7.append(res.__next__())
    assert arr7 == [0,1,1,2,3,5]
    # -----------------------
    for i in testgen():
        print(i)
    # -----------------------------------
    # Calling functions using loop (unsupported in PyJL)
    # testClass2: TestClass = TestClass()
    # funcs = [generator_func, generator_func_loop, generator_func_loop_using_var, testClass2.generator_func,
    #           generator_func_nested_loop]
    # arrL = []
    # for func in funcs:
    #     for i in func():
    #         arrL.append(i)
    # assert arrL == [1, 5, 10, 0, 1, 2, 0, 1, 2, 123, 5, 10, (0,0), (0,1), (1,0), (1,1)]
| 22.487805
| 109
| 0.5141
| 361
| 2,766
| 3.797784
| 0.232687
| 0.142232
| 0.039387
| 0.072939
| 0.268417
| 0.142961
| 0.109409
| 0.05981
| 0.04814
| 0.04814
| 0
| 0.054555
| 0.297542
| 2,766
| 123
| 110
| 22.487805
| 0.651055
| 0.267896
| 0
| 0.261905
| 0
| 0
| 0.038961
| 0.012987
| 0
| 0
| 0
| 0
| 0.083333
| 1
| 0.095238
| false
| 0
| 0
| 0
| 0.107143
| 0.035714
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
5521cf44b0712c36f70ad0bbd932c50c521247f6
| 1,252
|
py
|
Python
|
apps/api/quipper/main.py
|
phillamb168/system-design
|
d074409211521c930a2b355cac102caef827d627
|
[
"Apache-2.0"
] | 3
|
2021-11-12T11:00:35.000Z
|
2022-02-16T10:33:53.000Z
|
apps/api/quipper/main.py
|
phillamb168/system-design
|
d074409211521c930a2b355cac102caef827d627
|
[
"Apache-2.0"
] | null | null | null |
apps/api/quipper/main.py
|
phillamb168/system-design
|
d074409211521c930a2b355cac102caef827d627
|
[
"Apache-2.0"
] | 8
|
2021-08-04T18:47:18.000Z
|
2022-03-15T10:14:32.000Z
|
from fastapi import (
Depends,
FastAPI,
)
from fastapi.middleware.cors import CORSMiddleware
from sqlalchemy.orm import Session
from quipper import (
models,
schemas,
services,
)
from quipper.database import (
SessionLocal,
engine,
)
# Create the tables
models.Base.metadata.create_all(bind=engine)

app = FastAPI()

# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# overly permissive for production (and browsers reject credentialed
# requests against a wildcard origin) -- confirm the intended CORS policy.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# https://fastapi.tiangolo.com/tutorial/dependencies/dependencies-with-yield
def get_db():
    """FastAPI dependency: yield a per-request DB session, always closing it."""
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
@app.get("/healthz/", status_code=200)
def get_health():
    """Liveness probe: always reports healthy while the app is serving."""
    return {"healthy": True}
@app.post("/messages/", status_code=201)
def post_message(message: schemas.MessageCreate,
                 db: Session = Depends(get_db)):
    """Persist a new message; responds 201 Created with an empty body."""
    services.create_message(db=db, message=message)
@app.get("/conversations/{conversation_id}",
         response_model=schemas.Conversation)
def get_conversation(conversation_id: str,
                     db: Session = Depends(get_db)):
    """Return the conversation identified by `conversation_id`."""
    return services.get_conversation(db=db,
                                     conversation_id=conversation_id)
| 21.220339
| 76
| 0.664537
| 139
| 1,252
| 5.834532
| 0.453237
| 0.069051
| 0.039457
| 0.046856
| 0.051788
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006122
| 0.217252
| 1,252
| 58
| 77
| 21.586207
| 0.821429
| 0.073482
| 0
| 0.046512
| 0
| 0
| 0.052723
| 0.027658
| 0
| 0
| 0
| 0
| 0
| 1
| 0.093023
| false
| 0
| 0.116279
| 0.046512
| 0.255814
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
5522df8e11c9c1d695b6fd19a76f81ba0655ccc5
| 3,611
|
py
|
Python
|
data_structures/adjacency_list_graph.py
|
avg-2049-joe/py-algo-ds
|
4f9c3c086e134ee23fcc0ee3b981e81f40e860cd
|
[
"MIT"
] | null | null | null |
data_structures/adjacency_list_graph.py
|
avg-2049-joe/py-algo-ds
|
4f9c3c086e134ee23fcc0ee3b981e81f40e860cd
|
[
"MIT"
] | null | null | null |
data_structures/adjacency_list_graph.py
|
avg-2049-joe/py-algo-ds
|
4f9c3c086e134ee23fcc0ee3b981e81f40e860cd
|
[
"MIT"
] | null | null | null |
"""Adjacency list is a graph representation using array or a hash map"""
from collections import deque
class AdjacencyListGraph:
    """Undirected graph stored as a dict of {vertex: set(adjacent vertices)}."""

    def __init__(self):
        self.__nodes = {}

    def __str__(self):
        return str(self.__nodes)

    def insert_vertex(self, data):
        """Insert a vertex with an (initially empty) adjacency set."""
        self.__nodes[data] = set()

    def insert_edge(self, start_data, end_data):
        """Insert an undirected edge between two existing vertices.

        Raises:
            Exception: if either endpoint is not a vertex of this graph.
        """
        adj_list_start = self.__nodes.get(start_data, None)
        adj_list_end = self.__nodes.get(end_data, None)
        if adj_list_start is not None and adj_list_end is not None:
            adj_list_start.add(end_data)
            adj_list_end.add(start_data)
        else:
            raise Exception('vertexes are not found')

    def depth_first_search_path(self, source, destination, visited=None):
        """Return True if a path source -> destination exists (recursive DFS).

        `visited` threads the already-explored set through the recursion;
        external callers leave it as None.
        """
        # Fix: removed a leftover debug print(source, visited) that spammed
        # stdout on every recursive call.
        if visited is None:
            visited = set()
        # the source and destination must be vertices in the graph
        if source not in self.__nodes or destination not in self.__nodes:
            return False
        # a vertex already explored on this search cannot yield a new path
        if source in visited:
            return False
        visited.add(source)
        if source == destination:
            return True
        # ask the adjacent vertices whether the path exists from there
        for adj_vertex in self.__nodes[source]:
            if self.depth_first_search_path(adj_vertex, destination, visited):
                return True
        # adjacency list exhausted without reaching the destination
        return False

    def breadth_first_search(self, source, destination):
        """Return True if a path source -> destination exists (iterative BFS)."""
        visited = set()
        # source and destination must be valid vertices in the graph
        if source not in self.__nodes or destination not in self.__nodes:
            return False
        # FIFO frontier: appendleft + pop gives breadth-first order
        queue = deque()
        queue.appendleft(source)
        while queue:
            vertex = queue.pop()
            if vertex == destination:
                return True
            # skip vertices already expanded
            if vertex in visited:
                continue
            visited.add(vertex)
            # enqueue all neighbours of the current vertex
            for adj_vertex in self.__nodes[vertex]:
                queue.appendleft(adj_vertex)
        return False
def create_graph():
    """Build the 4-vertex sample graph shared by the demos below."""
    graph = AdjacencyListGraph()
    for vertex in range(4):
        graph.insert_vertex(vertex)
    for start, end in ((0, 1), (1, 2), (1, 3), (2, 3)):
        graph.insert_edge(start, end)
    return graph
def test_adjacency_list_graph():
    """Simple test for the graph implementation"""
    print(create_graph())
def test_dfs():
    """Depth first search a path"""
    print(create_graph().depth_first_search_path(0, 3))
def test_bfs():
    """Breadth first search a path"""
    print(create_graph().breadth_first_search(0, 3))
if __name__ == '__main__':
    # Run each demo once when executed directly.
    test_adjacency_list_graph()
    test_dfs()
    test_bfs()
| 27.356061
| 79
| 0.623927
| 476
| 3,611
| 4.529412
| 0.226891
| 0.045918
| 0.030612
| 0.025974
| 0.168831
| 0.132653
| 0.111317
| 0.111317
| 0.111317
| 0.072356
| 0
| 0.006319
| 0.298809
| 3,611
| 131
| 80
| 27.564886
| 0.845182
| 0.258377
| 0
| 0.211268
| 0
| 0
| 0.011468
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.140845
| false
| 0
| 0.014085
| 0.014085
| 0.309859
| 0.056338
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
5523fbe79d2ce42233aa94670f1b7a5b2c7819fa
| 4,515
|
py
|
Python
|
monroe-netalyzr/files/runme2.py
|
ana-cc/dockerstuffs
|
98131138731dd3c7a18e4e1a3b6975e3778502f9
|
[
"BSD-2-Clause"
] | 1
|
2020-09-10T19:15:09.000Z
|
2020-09-10T19:15:09.000Z
|
monroe-netalyzr/files/runme2.py
|
ana-cc/dockerstuffs
|
98131138731dd3c7a18e4e1a3b6975e3778502f9
|
[
"BSD-2-Clause"
] | null | null | null |
monroe-netalyzr/files/runme2.py
|
ana-cc/dockerstuffs
|
98131138731dd3c7a18e4e1a3b6975e3778502f9
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/python
import json
import subprocess
import logging
from pyroute2 import IPDB
import sys
# Verbose logging: everything goes to the container log for later inspection.
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("runme")
def add_dns(interface):
    """Write /etc/resolv.conf for `interface` from the /dns server list.

    Lines of /dns are assumed to look like 'server=<ip>@<iface>' -- TODO
    confirm against the producer of /dns.  Every entry mentioning
    `interface` becomes a 'nameserver <ip>' line, with 8.8.8.8 appended as
    a fallback.  Returns the content written, or an error message if the
    DNS file could not be processed.
    """
    # Fix: the original named this variable `str`, shadowing the builtin.
    resolv_conf = ""
    try:
        with open('/dns') as dnsfile:
            dnsdata = dnsfile.readlines()
        for item in (line.strip() for line in dnsdata):
            if interface in item:
                resolv_conf += item.split('@')[0].replace("server=", "nameserver ")
                resolv_conf += "\n"
        resolv_conf += "nameserver 8.8.8.8\n"
        with open("/etc/resolv.conf", "w") as f:
            f.write(resolv_conf)
    except Exception:
        # Fix: narrowed the bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; failure is still best-effort.
        resolv_conf = "Could not find DNS file"
    return resolv_conf
def main():
    """Run netalyzr over each mobile interface (op0/op1) and collect results.

    For every candidate interface: point the default route at it, install
    its DNS servers, check connectivity with ping, run the NetalyzrCLI jar,
    and write an annotated result file which is finally moved to
    /monroe/results/.  Individual steps are best-effort: failures are
    logged and the next step is attempted anyway.
    """
    ip = IPDB()
    # Names of all network interfaces on the node
    s = set([interface.ifname for interface in ip.interfaces.values()])
    logger.debug("Interfaces encountered: ")
    logger.debug(s)
    try:
        # loopback and the MONROE metadata interface are never measured
        s.remove('lo')
        s.remove('metadata')
    except:
        logger.debug("Metadata or lo not found!\n")
    # Node id and operator come from the MONROE config; fall back to
    # placeholders so the run still produces (labelled) output.
    try:
        with open('/monroe/config') as configfile:
            config = json.load(configfile)
            nodeid = config['nodeid']
    except:
        nodeid = 'could-not-get-id'
    try:
        with open('/monroe/config') as configfile:
            config = json.load(configfile)
            operator = config['operator']
    except:
        operator = 'N/A'
    subprocess.call(['mkdir', '/tmp/res/'])
    result_files=[]
    for item in s:
        if item in ['op0', 'op1']:
            logger.debug("Running on interface: " + item)
            # Route all traffic through the interface under test
            try:
                subprocess.call(['route', 'del','default'])
                logger.debug("Default route deleted.")
            except Exception as e:
                logger.debug(e)
            try:
                subprocess.call(['route', 'add','default','dev', item])
                logger.debug("Default route for interface " + item + " was added.")
            except Exception as e:
                logger.debug(e)
            # Install DNS for this interface and verify basic connectivity
            try:
                a = add_dns(item)
                logger.debug("DNS added\n")
                logger.debug(a)
                logger.debug("Testing connectivity...\n")
                process = subprocess.Popen(['ping' , '-c', '3', '8.8.8.8'], stdout=subprocess.PIPE)
                result_ping = process.communicate()[0]
                logger.debug(result_ping)
            except Exception as e:
                logger.debug(e)
            # The jar writes its own report; stdout goes to the container log
            try:
                logger.debug("Running netalyzr...\n")
                process = subprocess.Popen(['java', '-jar', '/opt/monroe/NetalyzrCLI.jar'])
                process.wait()
                result = "See container.log"
                logger.debug("Finished running netalyzr...\n")
            except Exception as e:
                logger.debug(e)
            # One result file per interface, tagged with the node id
            wr_str = "/tmp/res/mnr-" +str(nodeid) + "_" + item
            result_files.append(wr_str)
            try:
                logger.debug("Verifying resolv.conf...\n")
                dnsproc = subprocess.Popen(['cat','/etc/resolv.conf'], stdout=subprocess.PIPE)
                result_dns = dnsproc.communicate()[0]
            except Exception as e:
                result_dns = e
            try:
                logger.debug("Writing results to file...\n")
                with open(wr_str, 'w') as wr_file:
                    wr_file.write("ID: " + str(nodeid) + "\n")
                    wr_file.write("Interface: " + item +"\n")
                    wr_file.write("Operator: " + str(operator) +"\n")
                    wr_file.write("Resolv.conf:\n" + str(result_dns)+ "\n")
                    wr_file.write("Ping Results:\n" + str(result_ping)+ "\n")
                with open(wr_str, 'a') as wr_file:
                    wr_file.write(result)
                # Record the public IP as seen by RIPE for this interface
                logger.debug("Acquiring global IP address...\n")
                process = subprocess.Popen(['curl', 'https://stat.ripe.net/data/whats-my-ip/data.json'], stdout=subprocess.PIPE)
                result_ripe = process.communicate()[0]
                with open(wr_str, 'a') as wr_file:
                    wr_file.write(result_ripe)
            except Exception as e:
                logger.debug(e)
    # Hand the result files over to the MONROE results collector
    for result_file in result_files:
        try:
            subprocess.call(['/usr/bin/mv', result_file, '/monroe/results/'])
        except Exception as e:
            logger.debug(e)
    ip.release()
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
| 32.021277
| 127
| 0.50897
| 499
| 4,515
| 4.527054
| 0.280561
| 0.102258
| 0.052678
| 0.055777
| 0.18548
| 0.17884
| 0.170429
| 0.130589
| 0.086764
| 0.086764
| 0
| 0.005502
| 0.355925
| 4,515
| 140
| 128
| 32.25
| 0.771321
| 0.003544
| 0
| 0.306306
| 0
| 0
| 0.172115
| 0.006004
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018018
| false
| 0
| 0.045045
| 0
| 0.072072
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b28efc68a829abe66b1f36c000e5c38c0f766de
| 9,725
|
py
|
Python
|
teptools/summarise.py
|
nelsyeung/teptools
|
90a8cde2793e509b30c6fca0c3f64320855cf7c6
|
[
"MIT"
] | null | null | null |
teptools/summarise.py
|
nelsyeung/teptools
|
90a8cde2793e509b30c6fca0c3f64320855cf7c6
|
[
"MIT"
] | null | null | null |
teptools/summarise.py
|
nelsyeung/teptools
|
90a8cde2793e509b30c6fca0c3f64320855cf7c6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import sys
import os
import argparse
import subprocess
import textwrap
import helpers
class Summarise():
    """Stateful, line-by-line summariser for a ONETEP output file.

    The file is read once in __init__; run() then feeds each line through
    parse_line(), which accumulates energies/gradients and emits one
    formatted row per completed NGWF CG iteration.
    """

    def __init__(self, file, term_cols):
        # term_cols: terminal width; columns shrink until the table fits.
        self.term_cols = term_cols
        self.cols_width = [21, 22, 11, 15, 22]
        # State for the LNV (density kernel) iteration table.
        self.in_lnv = False
        self.lnv_iteration = 0
        # Quantities accumulated while scanning the file.
        self.iteration = 0
        self.rms_gradient = 0.0
        self.energy = 0.0
        self.old_energy = 0.0
        self.step_size = 0.0
        self.commutator = 0.0
        self.message = ''
        self.time_taken = ''
        self.num_processor = 0
        self.iteration_header, self.iteration_info = self.unformatted_str()
        self.summary = []
        with open(file, 'r') as f:
            self.file_lines = f.readlines()

    def unformatted_str(self):
        """Return summary info string for .format function."""
        header = ''
        info = ''
        num_decimals = [14, 14, 6, 11, 14]
        # Shrink every column width (and its decimal places) one character
        # at a time until the table fits the terminal (+5 for the index
        # column and separators).
        while self.term_cols < sum(self.cols_width) + 5:
            self.cols_width = [x-1 for x in self.cols_width]
            num_decimals = [x-1 for x in num_decimals]
        for i in range(len(self.cols_width)):
            header += '|{:^' + str(self.cols_width[i] - 1) + '}'
            info += ('{:' + str(self.cols_width[i]) +
                     '.' + str(num_decimals[i]) + 'f}')
        header = '| i' + header + '|'
        info = '{:3d}' + info + '{}'
        return header, info

    def parse_line(self, line):
        """Parse the contents of the file line currently on.

        Returns a formatted summary row when `line` completes an event
        worth reporting, the line itself for pass-through events, or ''.
        """
        linesplit = line.split()
        if 'TOTAL TIME' in line:
            self.time_taken = linesplit[2]
            self.num_processor = linesplit[4]
        elif 'RMS gradient' in line and 'NGWF' not in line:
            self.rms_gradient = float(linesplit[-1])
        elif 'step 0' in line:
            self.old_energy = self.energy
            self.energy = float(linesplit[5])
        elif ('Selected quadratic step' in line or
                'Selected cubic step' in line):
            self.step_size = float(linesplit[4])
        elif ('Starting BFGS iteration' in line or
                'improving iteration' in line):
            # New geometry step: reset so the first energy change is large.
            self.energy = 0.0
            self.old_energy = 1
        elif 'RMS NGWF gradient =' in line:
            self.rms_gradient = float(linesplit[5])
        elif 'BFGS: starting iteration' in line:
            self.iteration = 0
        elif '| commutator' in line:
            # Header of the LNV table: the numbered rows that follow carry
            # the commutator in their fourth column.
            self.in_lnv = True
            self.lnv_iteration = 1
        elif (line and self.in_lnv and
                str(self.lnv_iteration) == linesplit[0]):
            try:
                self.commutator = float(linesplit[3])
            except ValueError:
                # Non-numeric cell: keep the previous commutator value.
                pass
            self.lnv_iteration += 1
        elif 'Finished density kernel iterations' in line:
            self.in_lnv = False
        elif 'WARNING: maximum number of NGWF CG iterations' in line:
            self.message = ' <-- MAXIT_NGWF_CG EXCEEDED'
        elif 'NGWF optimisation converged' in line:
            self.message = ' <-- CG CONVERGED'
        elif 'NGWF CG iteration 001' in line:
            # First CG iteration: emit the column header row.
            return self.iteration_header.format(
                'RMS Gradient', 'Total Energy', 'Step', 'Commutator',
                'Change')
        elif ('Job started' in line or
                'Moving atom' in line or
                'WARNING: slope along search direction' in line):
            return line
        elif 'NGWF line search finished' in line:
            # A CG iteration completed: emit its summary row.
            self.iteration += 1
            return self.iteration_info.format(
                self.iteration, self.rms_gradient,
                self.energy, self.step_size, self.commutator,
                self.energy - self.old_energy, '')
        elif '-- CG' in line:
            # CG finished: emit a closing row and reset the counter.
            iteration = self.iteration + 1
            self.iteration = 0
            self.old_energy = self.energy
            self.energy = float(linesplit[2])
            return self.iteration_info.format(
                iteration, self.rms_gradient, self.energy,
                0, self.commutator,
                self.energy - self.old_energy, self.message)
        elif 'Job completed' in line:
            return '{} in {} on {} processors'.format(
                line, self.time_taken, self.num_processor)
        # Pass through BFGS progress lines except the noisy sub-step ones.
        if ('BFGS' in line and
                'BFGS: line :' not in line and
                'BFGS: trial:' not in line and
                'BFGS: quad :' not in line):
            return line
        return ''

    def run(self, line_print):
        """Parse the whole file and store the summary."""
        for line in self.file_lines:
            result = self.parse_line(line.strip())
            if result:
                self.summary.append(result)
                if line_print:
                    print(result)
def parser(default_args, args):
    """Return parsed command line arguments.

    `default_args` are rc-file options prepended to sys.argv when `args`
    is None; passing an explicit `args` list bypasses sys.argv entirely.
    """
    arg_parser = argparse.ArgumentParser(
        description=(
            'Extracts the results of the NGWF CG optimisation steps from an\n'
            'output file (which may still be running) and output them in a\n'
            'format as if you were running with output_detail=BRIEF or\n'
            'looking at the calculation summary.'),
        formatter_class=argparse.RawTextHelpFormatter)
    # Option table: (names, keyword arguments), registered in order.
    option_specs = [
        (('outfiles',),
         dict(metavar='outfile', type=str, nargs='*',
              help='ONETEP output files to be summarised\n'
                   'If none is specified then all out files (*.out)\n'
                   'in the current directory will be read')),
        (('-vd', '--vimdiff'),
         dict(action='store_true',
              help='Open multiple outputs in vimdiff')),
        (('--no-vimdiff',),
         dict(action='store_false', dest='vimdiff',
              help='Prevent opening multiple outputs in vimdiff')),
        (('-o', '--output'),
         dict(action='store_true',
              help='Write each output into its own file')),
        (('--no-output',),
         dict(action='store_false', dest='output',
              help='Prevent writing each output into its own file')),
    ]
    for names, options in option_specs:
        arg_parser.add_argument(*names, **options)
    if args is None:  # pragma: no cover
        args = [] if default_args == [''] else default_args
        args.extend(sys.argv[1:])
    return arg_parser.parse_args(args)
def print_side_view(summaries, col_width):
    """Print two summaries side-by-side.

    Each column advances through its own summary list; when a column hits a
    header row (containing '| i|') it "locks" and waits for the other
    column to reach its header too, keeping the two tables aligned.  Once
    either summary is exhausted the other is unlocked and drained.
    """
    wrapper = textwrap.TextWrapper(width=col_width)
    # Per-column cursors and synchronisation flags (index 0 = left column).
    indices = [0, 0]
    locks = [False, False]
    unlocks = [False, False]
    # Marker substrings at which the two columns must re-synchronise.
    sync_lines = ['| i|']
    completed = False
    while indices[0] < len(summaries[0]) or indices[1] < len(summaries[1]):
        outputs = ['--', '--']  # Dashes for empty lines to avoid confusions
        if completed:
            # One side is finished: let the other side run freely.
            unlocks = [True, True]
        for j in range(2):
            if (unlocks[j] or not locks[j]) and indices[j] < len(summaries[j]):
                if (not unlocks[j] and
                        any(line in summaries[j][indices[j]]
                            for line in sync_lines)):
                    # Reached a sync marker: hold this column.
                    locks[j] = True
                else:
                    wrapped = wrapper.wrap(summaries[j][indices[j]])
                    outputs[j] = wrapped[0]
                    locks[j] = False
                    unlocks[j] = False
                    indices[j] += 1
                    # Overflowing text is re-queued as the next line.
                    if len(wrapped) > 1:
                        summaries[j].insert(indices[j], wrapped[1])
                    if indices[j] == len(summaries[j]):
                        completed = True
        if locks[0] and locks[1]:
            # Both columns are at their markers: release them together.
            unlocks = [True, True]
        if not locks[0] or not locks[1]:
            print(('{:<' + str(col_width) + '}' +
                   '{:' + str(col_width) + '}').format(
                       outputs[0], outputs[1]))
def main(args=None, rcfile=None):
    """Summarise one or more ONETEP output files.

    Output modes, chosen from the parsed options: print to the terminal
    (default), print two files side-by-side (exactly two files on a wide
    terminal), write per-file summary files (-o), or open the written
    summaries in vimdiff (-vd).
    """
    default_config = {
        'options': [],
        'outfile_ext': 'out'
    }
    config = helpers.parse_rcfile(rcfile, 'summarise', default_config)
    args = parser(config['options'], args)
    try:
        # Terminal width from `stty size` ("rows cols").
        term_cols = int(subprocess.check_output(['stty', 'size']).split()[1])
    except subprocess.CalledProcessError:
        term_cols = 180  # Minimum size required for proper display
    outfiles = helpers.find_files(args.outfiles, config['outfile_ext'])
    # Always disable vimdiff mode if only one output file is specified
    if len(outfiles) == 1:
        args.vimdiff = False
    iswrite = args.output or args.vimdiff
    newfiles = []  # files created by write output mode
    summaries = []  # Only used for side-by-side view
    # Side-by-side needs exactly two files and a terminal >= 180 columns;
    # each column then gets half the width.
    side_view = not iswrite and len(outfiles) == 2 and term_cols >= 180
    term_cols = int(term_cols / 2) if side_view else term_cols
    line_print = not iswrite and not side_view
    for file in outfiles:
        summarise = Summarise(file, term_cols)
        summarise.run(line_print)
        if iswrite:
            filename = os.path.splitext(os.path.basename(file))[0]
            newfile, summary_file = helpers.create_file(filename, 'summary')
            newfiles.append(newfile)
        if side_view:
            summaries.append(summarise.summary)
        for line in summarise.summary:
            if iswrite:
                summary_file.write(line + '\n')
        if iswrite:
            summary_file.close()
            if not args.vimdiff:
                print(file + ' summary > ' + newfile)
    if side_view:
        print_side_view(summaries, term_cols)
    if args.vimdiff:
        subprocess.call('vimdiff ' + ' '.join(newfiles), shell=True)
    # Summary files were only a vehicle for vimdiff: clean them up unless
    # -o asked to keep them.
    if not args.output and args.vimdiff:
        for newfile in newfiles:
            os.remove(newfile)
# Command-line entry point.
if __name__ == '__main__':  # pragma: no cover
    main()
| 34.003497
| 79
| 0.55671
| 1,158
| 9,725
| 4.569948
| 0.228843
| 0.028345
| 0.022676
| 0.006803
| 0.150794
| 0.102797
| 0.069917
| 0.017763
| 0.017763
| 0
| 0
| 0.013893
| 0.333882
| 9,725
| 285
| 80
| 34.122807
| 0.803026
| 0.049974
| 0
| 0.120536
| 0
| 0
| 0.141321
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03125
| false
| 0.004464
| 0.026786
| 0
| 0.102679
| 0.040179
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b292f152e109d6afd9ce99c12e29518dfc1f37f
| 3,202
|
py
|
Python
|
catfeeder_stepper.py
|
novalis111/catfeeder
|
4597bb24b4d159b9a79ef18e808ccab15391c659
|
[
"MIT"
] | null | null | null |
catfeeder_stepper.py
|
novalis111/catfeeder
|
4597bb24b4d159b9a79ef18e808ccab15391c659
|
[
"MIT"
] | null | null | null |
catfeeder_stepper.py
|
novalis111/catfeeder
|
4597bb24b4d159b9a79ef18e808ccab15391c659
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Import required libraries
import time
import random
import datetime
import RPi.GPIO as GPIO
# Use BCM GPIO references instead of physical pin numbers
GPIO.setmode(GPIO.BCM)
# Feed button on BCM pin 18; internal pull-up, so the input reads low
# when the button is pressed.
GPIO.setup(18, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# 1 step = 1/4 of full
def rotate(movesteps, rotation='r', speed=1):
    """Drive the stepper motor `movesteps` quarter-turns.

    rotation: 'r' steps forward through the coil sequence; anything else
    steps backward.  speed: delay in milliseconds between mini-steps
    (clamped to a minimum of 1).  Blocks until the motion completes, then
    de-energises all coils.
    """
    # One sequence is eight mini steps, 512 steps = 360°
    movesteps = movesteps * 128 * 8
    if speed < 1:
        speed = 1
    # Define GPIO signals to use
    # Physical pins 11,15,16,18
    # GPIO17,GPIO22,GPIO23,GPIO24
    step_pins = [17, 22, 23, 24]
    # Set all pins as output
    for pin in step_pins:
        GPIO.setup(pin, GPIO.OUT)
        GPIO.output(pin, False)
    # Define advanced sequence
    # as shown in manufacturers datasheet
    seq = [[1, 0, 0, 1],
           [1, 0, 0, 0],
           [1, 1, 0, 0],
           [0, 1, 0, 0],
           [0, 1, 1, 0],
           [0, 0, 1, 0],
           [0, 0, 1, 1],
           [0, 0, 0, 1]]
    step_count = len(seq)
    if rotation == 'r':
        step_dir = 1
    else:
        step_dir = -1
    # Set to 1 or 2 for clockwise
    # Set to -1 or -2 for anti-clockwise
    # Initialise variables
    step_counter = 0
    # Start main loop: apply one row of the coil sequence per mini-step
    while movesteps > 0:
        for pin in range(0, 4):
            xpin = step_pins[pin]  # Get GPIO
            if seq[step_counter][pin] != 0:
                GPIO.output(xpin, True)
            else:
                GPIO.output(xpin, False)
        step_counter += step_dir
        # If we reach the end of the sequence
        # start again
        if step_counter >= step_count:
            step_counter = 0
        if step_counter < 0:
            step_counter = step_count + step_dir
        # Wait before moving on
        time.sleep(speed / float(1000))
        movesteps -= 1
    # reset pins (de-energise the coils so the motor doesn't heat up)
    for pin in step_pins:
        GPIO.output(pin, False)
'''
limit = 5
cur_rot = 'r'
while limit > 0:
steps = random.randint(0, 4)
pace = random.randint(0, 5)
if cur_rot == 'r':
cur_rot = 'l'
else:
cur_rot = 'r'
rotate(steps, cur_rot, pace)
limit -= 1
'''
lastpress = 0  # time.time() of the most recent button press
lastfeed = 0  # time.time() of the most recent full feed
# Poll the feed button every 5 s; the amount dispensed depends on how long
# ago the previous press / full feed happened.
try:
    while True:
        input_state = GPIO.input(18)
        if not input_state:
            # Button pressed (input is pulled low)
            press_ago = time.time() - lastpress
            lastfeed_ago = time.time() - lastfeed
            if press_ago > 10800 or lastfeed_ago > 10800:
                # Full load -> 5 full rounds = 5*4 steps
                print("Full feed")
                rotate(20)
                lastpress = time.time()
                lastfeed = time.time()
            elif press_ago > 300:
                # 5 minutes ago, only one round
                print("Medium feed")
                rotate(4)
                lastpress = time.time()
            elif press_ago > 60:
                # 1 minute ago, only tiny move
                print("Tiny feed")
                rotate(1)
                lastpress = time.time()
            else:
                print("No Feed yet")
                print("Last Feed was " + str(round(lastfeed_ago / 60, 1)) + " minutes ago")
                print("Next full Feed is in " + str(round((10800 - lastfeed_ago) / 60, 1)) + " minutes")
        time.sleep(5)
except KeyboardInterrupt:
    # Ctrl-C is the normal way to stop this script.
    pass
finally:
    # Fix: GPIO.cleanup() originally sat after the infinite loop and was
    # unreachable, leaving the pins configured on exit.
    GPIO.cleanup()
| 25.212598
| 100
| 0.538101
| 440
| 3,202
| 3.836364
| 0.340909
| 0.015403
| 0.012441
| 0.014218
| 0.133294
| 0.05628
| 0.018365
| 0.018365
| 0.016588
| 0.016588
| 0
| 0.067437
| 0.351655
| 3,202
| 126
| 101
| 25.412698
| 0.745183
| 0.206121
| 0
| 0.171429
| 0
| 0
| 0.042563
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014286
| false
| 0
| 0.057143
| 0
| 0.071429
| 0.085714
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b2ae5c5d7a422f9f2eda94d4feb8800f0416e6f
| 604
|
py
|
Python
|
SpoTwillio/search.py
|
Natfan/funlittlethings
|
80d5378b45b5c0ead725942ee50403bd057514a6
|
[
"MIT"
] | 1
|
2017-12-03T15:08:42.000Z
|
2017-12-03T15:08:42.000Z
|
SpoTwillio/search.py
|
Natfan/funlittlethings
|
80d5378b45b5c0ead725942ee50403bd057514a6
|
[
"MIT"
] | 2
|
2017-09-25T12:43:41.000Z
|
2021-05-07T14:29:27.000Z
|
SpoTwillio/search.py
|
Natfan/funlittlethings
|
80d5378b45b5c0ead725942ee50403bd057514a6
|
[
"MIT"
] | 1
|
2017-09-04T19:37:42.000Z
|
2017-09-04T19:37:42.000Z
|
import spotipy
import argparse
# Unauthenticated Spotify client (search endpoints only).
sp = spotipy.Spotify()
# CLI: a mandatory search term plus an optional result count.
# NOTE(review): parse_args() runs at import time, so importing this module
# requires valid command-line arguments.
parser = argparse.ArgumentParser()
parser.add_argument("term", help="The artist that you want to search for")
parser.add_argument("-c", "--count", help="The amount of results that you want, capped at 20", type=int)
args = parser.parse_args()
def spoprint():
    """Search Spotify for args.term and print each matching track name."""
    results = sp.search(q=args.term, limit=args.count)
    for i, t in enumerate(results['tracks']['items']):
        print(' ', i, t['name'])
# The Spotify search API caps results at 20 per request, so reject larger
# counts up front.
if args.count:
    if args.count > 20:
        print("enter a count lower than or equal to 20")
    else:
        spoprint()
else:
    # No count supplied: sp.search receives limit=None.
    spoprint()
| 27.454545
| 104
| 0.662252
| 89
| 604
| 4.460674
| 0.573034
| 0.06801
| 0.085642
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012295
| 0.192053
| 604
| 21
| 105
| 28.761905
| 0.80123
| 0
| 0
| 0.222222
| 0
| 0
| 0.256623
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.111111
| 0
| 0.166667
| 0.277778
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b2b9f0e1320f2cd7097a2cc090fb00dcd38d4c6
| 1,431
|
py
|
Python
|
wstest/handler/current_effect_status_handler_test.py
|
PedalController/PedalPiREST
|
aa9418d44f2f5dbec604753a03bf8a74057c627c
|
[
"Apache-2.0"
] | null | null | null |
wstest/handler/current_effect_status_handler_test.py
|
PedalController/PedalPiREST
|
aa9418d44f2f5dbec604753a03bf8a74057c627c
|
[
"Apache-2.0"
] | 42
|
2016-07-04T11:17:54.000Z
|
2018-03-18T18:36:09.000Z
|
wstest/handler/current_effect_status_handler_test.py
|
PedalController/PedalPiREST
|
aa9418d44f2f5dbec604753a03bf8a74057c627c
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 SrMouraSilva
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from wstest.handler.handler_test import Test
class CurrentEffectStatusHandlerTest(Test):
    """Integration test: toggling an effect on the current pedalboard."""

    def test_put(self):
        """Toggle an effect via REST and verify the pedalboard state matches."""
        # Remember the current bank/pedalboard so it can be restored at the end.
        original_current_index = self.rest.get_current_index().json()
        # Create a throwaway bank and make its first pedalboard current.
        bank = self.default_bank_mock
        bank.index = self.rest.create_bank(bank).json()['index']
        pedalboard = bank.pedalboards[0]
        self.rest.set_current_pedalboard(pedalboard)
        effect = pedalboard.effects[0]
        response = self.rest.toggle_effect_current_pedalboard(effect)
        self.assertEqual(Test.SUCCESS, response.status_code)
        response = self.rest.get_pedalboard(pedalboard)
        # Mirror the toggle locally so the expected JSON matches the server.
        effect.toggle()
        self.assertEqual(pedalboard.json, response.json())
        # Cleanup: restore the previous current pedalboard, delete the bank.
        self.rest.set_current_pedalboard_by_index(original_current_index['bank'], original_current_index['pedalboard'])
        self.rest.delete_bank(bank)
| 35.775
| 119
| 0.735849
| 189
| 1,431
| 5.439153
| 0.497355
| 0.054475
| 0.058366
| 0.031128
| 0.054475
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008496
| 0.177498
| 1,431
| 39
| 120
| 36.692308
| 0.864911
| 0.383648
| 0
| 0
| 0
| 0
| 0.021889
| 0
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0.0625
| false
| 0
| 0.0625
| 0
| 0.1875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b374ff2739d4dbdac37bc7c8c986c366e319b95
| 1,250
|
py
|
Python
|
dmsl-runner/main.py
|
GloomyGhost-MosquitoSeal/DmslRunner
|
b541c27f9a9857012b465e153b5de827a8db4b29
|
[
"Apache-2.0"
] | null | null | null |
dmsl-runner/main.py
|
GloomyGhost-MosquitoSeal/DmslRunner
|
b541c27f9a9857012b465e153b5de827a8db4b29
|
[
"Apache-2.0"
] | null | null | null |
dmsl-runner/main.py
|
GloomyGhost-MosquitoSeal/DmslRunner
|
b541c27f9a9857012b465e153b5de827a8db4b29
|
[
"Apache-2.0"
] | null | null | null |
import os
import sys
import json
import time
import base64
import subprocess
DEBUG = 0
def json_paser(injson):
    """Decode a base64-encoded JSON job and return its dmsl source code.

    `injson` is base64 text wrapping a JSON object with keys "language"
    and "code" (the latter itself base64-encoded).  Returns the decoded
    code for "dmsl" jobs; for any other language prints "json error" and
    returns None.
    """
    payload = base64.b64decode(str.encode(injson))
    job = json.loads(payload)
    if job["language"] == "dmsl":
        code = bytes.decode(base64.b64decode(job["code"]))
        if DEBUG == 1:
            print(code)
        return code
    else:
        print("json error")
        # Fix: make the no-result path explicit instead of falling off the
        # end of the function.
        return None
def dmsl_runner(code):
    """Write *code* to a scratch directory and execute it with DmslRunner.

    Returns the interpreter's stdout as text, or a canned error message
    (user-facing Chinese string, kept verbatim) when execution fails.
    """
    # tempfile.mkdtemp gives a unique directory atomically, avoiding the
    # collision/race of the original hand-rolled time-based name under /tmp.
    path = tempfile.mkdtemp(prefix="dmsl")
    with open(path + "/demo.dmsl", "w+") as f:
        f.write(code)
    run_path = "cd " + path + " && DmslRunner demo.dmsl"
    try:
        outstd = subprocess.Popen(run_path, shell=True, stdout=subprocess.PIPE)
        outstd.wait()
        if DEBUG == 1:
            print(path)
            print(run_path)
            print(outstd)
        # Decode the captured stdout bytes to str for the caller.
        out = outstd.stdout.read().decode()
    except Exception:  # narrowed from a bare except; still best-effort
        out = "你的dmsl代码错误啦,回去重新学!"
    return out
if __name__ == "__main__":
    # Expect exactly one argument: the base64-encoded JSON request.
    if len(sys.argv) < 2:
        print("Arg Error")
    else:
        # Decode the request, run it, and echo the interpreter output.
        source = json_paser(sys.argv[1])
        output = dmsl_runner(source)
        print(output)
| 22.321429
| 79
| 0.5576
| 154
| 1,250
| 4.428571
| 0.435065
| 0.030792
| 0.02346
| 0.038123
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025258
| 0.3032
| 1,250
| 56
| 80
| 22.321429
| 0.75775
| 0
| 0
| 0.085106
| 0
| 0
| 0.089528
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042553
| false
| 0
| 0.12766
| 0
| 0.212766
| 0.148936
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b38e1564807028d56e7217b8ffcc8fe5b8fefc8
| 399
|
py
|
Python
|
Task2G.py
|
JoeBarney1/floodlevelmonitor131
|
98d93ca3d5bf6d1f2f105529d2f758450f791188
|
[
"MIT"
] | 1
|
2022-01-23T19:30:19.000Z
|
2022-01-23T19:30:19.000Z
|
Task2G.py
|
JoeBarney1/floodlevelmonitor131
|
98d93ca3d5bf6d1f2f105529d2f758450f791188
|
[
"MIT"
] | null | null | null |
Task2G.py
|
JoeBarney1/floodlevelmonitor131
|
98d93ca3d5bf6d1f2f105529d2f758450f791188
|
[
"MIT"
] | null | null | null |
from floodsystem.flood import highest_risk
from floodsystem.stationdata import build_station_list
def run():
    """Requirements for Task 2G"""
    # NOTE: works with the whole station list too, but takes a very long time.
    for station in highest_risk(build_station_list(), dt=3, N=10, y=3):
        print(station)
# Script entry point: print the task banner, then run the demonstration.
if __name__ == "__main__":
    print("*** Task 2G: CUED Part IA Flood Warning System ***")
    run()
| 30.692308
| 63
| 0.691729
| 59
| 399
| 4.440678
| 0.694915
| 0.114504
| 0.122137
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018692
| 0.195489
| 399
| 13
| 64
| 30.692308
| 0.797508
| 0.172932
| 0
| 0
| 0
| 0
| 0.178462
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.222222
| 0
| 0.333333
| 0.222222
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b4525dbfb8005b0d75af097477692fce19cafdb
| 3,854
|
py
|
Python
|
main.py
|
poplock1/Number_guesser
|
2766d73e0f1babc980865de8fabe2f37d9ee43a6
|
[
"MIT"
] | null | null | null |
main.py
|
poplock1/Number_guesser
|
2766d73e0f1babc980865de8fabe2f37d9ee43a6
|
[
"MIT"
] | null | null | null |
main.py
|
poplock1/Number_guesser
|
2766d73e0f1babc980865de8fabe2f37d9ee43a6
|
[
"MIT"
] | null | null | null |
import pygame
import tensorflow as tf
import sys
import settings as stg
import matplotlib.pyplot as plt
from board import Grid
import gui
import numpy as np
class Game():
    """Pygame front-end for a digit guesser.

    The user sketches a digit on a Grid board; pressing SPACE feeds the
    28x28 board into a pre-trained Keras model ('num_reader.model') and the
    predicted digit is displayed.  'c' clears the board; ESC or closing the
    window quits.
    """
    def __init__(self):
        # Window, model, clock, and the mutually exclusive mode flags.
        pygame.init()
        self.game_display = pygame.display.set_mode(
            (stg.display_x, stg.display_y))
        pygame.display.set_caption(stg.display_title)
        self.tf_model = tf.keras.models.load_model('num_reader.model')
        self.running = True            # application-level flag
        self.guessing = False          # True while a prediction is pending
        self.drawing = True            # True while the user is sketching
        self.clock = pygame.time.Clock()
        self.click = False             # is the mouse button currently held?
        self.prediction_text = None    # gui.TextWindow showing the last guess
    def new_board(self):
        # Reset to drawing mode with a fresh empty grid.
        if self.running:
            self.guessing = False
            self.drawing = True
            self.board = Grid()
    def new_guess(self):
        # Build the two static instruction labels, then loop
        # board-reset -> game-loop until the user quits.
        self.info_text1 = gui.TextWindow(
            stg.text3_x, stg.text3_y, stg.text3_w, stg.text3_h, stg.text3_text_color, stg.text3_text, stg.text3_font)
        self.info_text2 = gui.TextWindow(
            stg.text4_x, stg.text4_y, stg.text4_w, stg.text4_h, stg.text4_text_color, stg.text4_text, stg.text4_font)
        while self.running:
            self.new_board()
            self.run()
    def run(self):
        # Standard 60 FPS game loop: events -> update -> draw.
        self.playing = True
        while self.playing:
            self.clock.tick(60)
            self.events()
            self.update()
            self.draw()
    def events(self):
        # Handle quit, mouse press/release, and the three key bindings
        # (ESC quit, SPACE guess, 'c' clear).
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                self.playing = False
                self.running = False
                pygame.quit()
                sys.exit()
            if event.type == pygame.MOUSEBUTTONDOWN:
                self.click = True
            if event.type == pygame.MOUSEBUTTONUP:
                self.click = False
        self.mouse_pos = pygame.mouse.get_pos()
        self.keys = pygame.key.get_pressed()
        if self.keys[pygame.K_ESCAPE]:
            self.playing = False
            self.running = False
            pygame.quit()
            sys.exit()
        if self.keys[pygame.K_SPACE]:
            # Switch from drawing to guessing mode.
            self.drawing = False
            self.guessing = True
        if self.keys[pygame.K_c]:
            self.new_board()
    def update(self):
        # While drawing, paint cells under the held mouse button;
        # once in guessing mode, run the model exactly once.
        if self.drawing:
            if self.click:
                self.board.update()
        elif self.guessing:
            self.guess()
    def draw(self):
        # Redraw background, board, and whichever labels apply to the mode.
        self.game_display.fill(stg.BG_COLOR)
        self.board.draw(self.game_display)
        if self.drawing:
            self.info_text1.draw(self.game_display)
        elif self.prediction_text:
            self.prediction_text.draw(self.game_display)
        self.info_text2.draw(self.game_display)
        pygame.display.update()
    def guess(self):
        # Reshape the board to the model's expected (-1, 28, 28) input,
        # take the argmax of the first prediction, and build its label.
        # self.data = self.overwriting_data()
        self.data = np.reshape(self.board.grid, (-1, 28, 28))
        self.predictions = self.tf_model.predict(self.data)
        self.prediction = (np.argmax(self.predictions[0]))
        self.prediction_text = gui.TextWindow(stg.text2_x, stg.text2_y, stg.text2_w, stg.text2_h,
                                              stg.text2_text_color, (f'{stg.text2_text}{self.prediction}'), stg.text2_font)
        self.guessing = False
    # def overwriting_data(self):
    # mnist = tf.keras.datasets.mnist
    # (x_train, y_train), (x_test, y_test) = mnist.load_data()
    # x_train = tf.keras.utils.normalize(x_train, axis=1)
    # x_test = tf.keras.utils.normalize(x_test, axis=1)
    # for row in range(28):
    # for col in range(28):
    # x_test[0][row][col] = self.board.grid[row][col]
    # plt.imshow(x_test[2])
    # plt.show()
    # print(x_test[2])
    # print(self.board.grid)
    # return x_test
# Script entry: run guess sessions until the user quits, then shut pygame down.
app = Game()
while app.running:
    app.new_guess()
pygame.quit()
| 31.590164
| 123
| 0.577582
| 495
| 3,854
| 4.339394
| 0.228283
| 0.02933
| 0.041899
| 0.035382
| 0.150372
| 0.081006
| 0.081006
| 0.047486
| 0.047486
| 0.047486
| 0
| 0.015837
| 0.311884
| 3,854
| 121
| 124
| 31.85124
| 0.794118
| 0.127919
| 0
| 0.222222
| 0
| 0
| 0.014644
| 0.009863
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088889
| false
| 0
| 0.088889
| 0
| 0.188889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b454cd03eb2a08f7263b1381a0130bff6f74d66
| 825
|
py
|
Python
|
examples/maskrcnn_fastai.py
|
ramaneswaran/mantisshrimp
|
d30c056f1f9f26a2ce42da73cfb32d591321f426
|
[
"Apache-2.0"
] | null | null | null |
examples/maskrcnn_fastai.py
|
ramaneswaran/mantisshrimp
|
d30c056f1f9f26a2ce42da73cfb32d591321f426
|
[
"Apache-2.0"
] | 8
|
2020-06-16T18:06:42.000Z
|
2020-09-15T22:35:56.000Z
|
examples/maskrcnn_fastai.py
|
ramaneswaran/mantisshrimp
|
d30c056f1f9f26a2ce42da73cfb32d591321f426
|
[
"Apache-2.0"
] | null | null | null |
from mantisshrimp import *
from mantisshrimp.hub.pennfundan import *
from mantisshrimp.engines.fastai import *
import albumentations as A

# Load the PennFudan pedestrian dataset and split it 80/20 train/valid.
source = get_pennfundan_data()
parser = PennFundanParser(source)
splitter = RandomSplitter([0.8, 0.2])
train_records, valid_records = parser.parse(splitter)

# Augment only the training set with random flips.
train_transforms = AlbuTransform([A.Flip()])
# BUG FIX: Dataset's second argument is the transform pipeline; the original
# passed train_records twice, so train_transforms was never applied.
train_dataset = Dataset(train_records, train_transforms)
valid_dataset = Dataset(valid_records)

# Mask R-CNN with two classes (background + pedestrian), scored by COCO
# metrics on both boxes and masks.
model = MantisMaskRCNN(num_classes=2)
metric = COCOMetric(valid_records, bbox=True, mask=True)

train_dataloader = model.dataloader(train_dataset, batch_size=2, num_workers=2)
valid_dataloader = model.dataloader(valid_dataset, batch_size=2, num_workers=2)

# Fine-tune with fastai: freeze-train head, then unfreeze, over 3 epochs.
learn = rcnn_learner(
    dls=[train_dataloader, valid_dataloader], model=model, metrics=[metric]
)
learn.fine_tune(3, lr=2e-4)
| 28.448276
| 79
| 0.796364
| 110
| 825
| 5.754545
| 0.472727
| 0.075829
| 0.06951
| 0.053712
| 0.088468
| 0.088468
| 0.088468
| 0
| 0
| 0
| 0
| 0.016129
| 0.098182
| 825
| 28
| 80
| 29.464286
| 0.834677
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.210526
| 0
| 0.210526
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b465713a6d45355da79d52e919ae9d389a96675
| 10,649
|
py
|
Python
|
setup.py
|
mdavezac/bempp
|
bc573062405bda107d1514e40b6153a8350d5ab5
|
[
"BSL-1.0"
] | null | null | null |
setup.py
|
mdavezac/bempp
|
bc573062405bda107d1514e40b6153a8350d5ab5
|
[
"BSL-1.0"
] | null | null | null |
setup.py
|
mdavezac/bempp
|
bc573062405bda107d1514e40b6153a8350d5ab5
|
[
"BSL-1.0"
] | null | null | null |
from os.path import basename, dirname, join, abspath
from setuptools import setup, Extension
from distutils.command.build import build as dBuild
from setuptools.command.install import install as dInstall
from setuptools.command.build_ext import build_ext as dBuildExt
from setuptools.command.bdist_egg import bdist_egg as dBuildDistEgg
from setuptools.command.sdist import sdist as dSDist
from setuptools.command.egg_info import egg_info as dEggInfo
from distutils.dir_util import mkpath
# Absolute directory containing this setup.py; cmake is configured from here.
source_dir = dirname(abspath(__file__))
# Staging directory the cmake build installs the Python package into.
package_dir = join(source_dir, 'pkg_install')
# Ensure the staging directory exists before any command runs (import-time side effect).
mkpath(package_dir)
def cmake_cache_line(variable, value, type='STRING'):
    """Render one cmake cache-preload line: set(<var> "<value>" CACHE <type> "")."""
    return 'set(%s "%s" CACHE %s "")\n' % (variable, value, type)
def as_preload_file(name, info):
    """Convert Python build info to cmake cache-preload lines.

    *info* is a dict that may carry 'libraries', 'library_dirs', and
    'include_dirs' lists; returns a list of set(...) cache lines naming
    <NAME>_LIBRARIES and <NAME>_INCLUDE_DIRS.
    """
    result = []
    # Libraries (and their search dirs) collapse into one <NAME>_LIBRARIES entry.
    # NOTE: library_dirs are only emitted when libraries are present, matching
    # the original behavior.
    if info.get('libraries'):
        libs = ('-l' + '-l'.join(info['libraries'])).strip()
        if info.get('library_dirs'):
            libdirs = ('-L' + '-L'.join(info['library_dirs'])).strip()
        else:
            libdirs = ""
        result.append(cmake_cache_line("%s_LIBRARIES" % name,
                                       "%s %s" % (libdirs, libs)))
    # Include directories become a semicolon-separated <NAME>_INCLUDE_DIRS entry.
    if info.get('include_dirs'):
        incs = ';'.join(info['include_dirs']).strip()
        result.append(cmake_cache_line("%s_INCLUDE_DIRS" % name, incs))
    return result
def cmake_executable():
    """ Path to cmake executable """
    from distutils.spawn import find_executable
    from os import environ
    from os.path import exists

    found = find_executable('cmake')
    if found is None and 'CASAPATH' in environ:
        # Tries to out-smart CASA.
        # Look places cmake might be that casa removes from path.
        home = environ['HOME']
        candidates = (
            join('/', 'usr', 'local', 'bin'),
            join(home, 'bin'),
            join(home, '.local', 'bin'),
            join(home, 'usr', 'bin'),
            join('/', 'sw', 'bin')  # -- default Fink location
        )
        for candidate in candidates:
            executable_path = join(candidate, 'cmake')
            if exists(executable_path):
                found = executable_path
                break
    if found is None:
        raise RuntimeError('Could not find cmake executable in path')
    return found
class Build(dBuild):
    """Custom distutils ``build`` command that delegates compilation to cmake."""
    description = "Compiles BEM++ using cmake"
    user_options = dBuild.user_options + [
        ("external=", None, "Location for external packages")
    ]
    def initialize_options(self):
        # --external is optional; when unset, cmake falls back to defaults.
        self.external = None
        dBuild.initialize_options(self)
    def configure_cmdl(self, filename):
        """Creates the cmake command-line.

        First puts variables into a cache file (*filename*). This is safer
        than going through the command-line.  Returns the argument list to
        pass to cmake (a single ``-C<file>`` option).
        """
        from sys import executable
        # other args
        other_args = [
            cmake_cache_line('PYTHON_EXECUTABLE', executable, 'PATH'),
            cmake_cache_line('NOEXPORT', 'TRUE', 'BOOL'),
            cmake_cache_line('PYPACKED', 'TRUE', 'BOOL'),
        ]
        if(self.external):
            # Point cmake at the external package root (and its PyTrilinos).
            other_args.extend([
                cmake_cache_line('EXTERNAL_ROOT', self.external, 'PATH'),
                cmake_cache_line('CMAKE_PREFIX_PATH',
                    self.external + ";" + join(self.external, 'python', 'PyTrilinos'),
                    'PATH'
                )
            ])
        other_args.append('\n')
        with open(filename, 'w') as file: file.writelines(other_args)
        return ['-C%s' % filename]
    def _configure(self, build_dir):
        # Run "cmake <source_dir>" inside build_dir using the cached variables.
        from distutils import log
        from distutils.spawn import spawn
        from os import chdir, getcwd
        current_dir = getcwd()
        mkpath(build_dir)
        command_line = self.configure_cmdl(join(build_dir, 'Variables.cmake'))
        log.info(
            "CMake: configuring with variables in %s "
            % join(build_dir, 'Variables.cmake')
        )
        cmake = cmake_executable()
        try:
            chdir(build_dir)
            spawn([cmake] + command_line + [source_dir])
        finally: chdir(current_dir)
    def _build(self, build_dir):
        # Invoke "cmake --build ." in the configured build directory.
        from distutils import log
        from distutils.spawn import spawn
        from os import chdir, getcwd
        log.info("CMake: building in %s" % build_dir)
        current_dir = getcwd()
        cmake = cmake_executable()
        try:
            chdir(build_dir)
            spawn([cmake, '--build', '.'])
        finally: chdir(current_dir)
    def run(self):
        # configure -> build -> install into the staging package_dir, then run
        # the regular distutils build with running_binary restored afterwards.
        from os.path import abspath
        build_dir = join(dirname(abspath(__file__)), self.build_base)
        self._configure(build_dir)
        self._build(build_dir)
        self._install(build_dir, package_dir)
        try:
            prior = getattr(self.distribution, 'running_binary', False)
            dBuild.run(self)
        finally: self.distribution.running_binary = prior
    def _install(self, build_dir, install_dir):
        # Run cmake's "install" target, pointed at install_dir.
        from distutils import log
        from distutils.sysconfig import PREFIX, get_python_lib
        from sys import version_info
        from os.path import abspath, relpath
        from os import chdir, getcwd
        # NOTE(review): libtopy is computed but never used below, and
        # libtopy[:1] is a single character so it can never equal '..' —
        # this looks like leftover/buggy code; confirm intent before removal.
        libtopy = relpath(get_python_lib(), PREFIX)
        if len(libtopy) > 2 and libtopy[:1] == '..':
            libtopy = join(
                'lib',
                'python{0.major}.{0.minor}'.format(version_info),
                'site-packages'
            )
        current_cwd = getcwd()
        build_dir = abspath(build_dir)
        cmake = cmake_executable()
        install_dir = abspath(install_dir)
        log.info("CMake: Installing package to %s" % install_dir)
        try:
            chdir(build_dir)
            self.spawn([cmake,
                '-DPYTHON_PKG_DIR=\'%s\'' % install_dir,
                source_dir
            ])
            self.spawn([cmake, '--build', '.', '--target', 'install'])
        finally: chdir(current_cwd)
        # Mark subsequent commands (egg_info etc.) as running on binary output.
        self.distribution.running_binary = True
class Install(dInstall):
    """Custom ``install`` command: builds via cmake, then installs the cmake
    output directly into the target site-packages directory."""
    def run(self):
        from distutils import log
        from os.path import abspath
        from os import chdir, getcwd
        # Ensure the cmake build (via the Build command above) has run.
        self.distribution.run_command('build')
        current_cwd = getcwd()
        # NOTE(review): self.build_base on an install command presumably comes
        # from the build command's option sharing — confirm.
        build_dir = join(dirname(abspath(__file__)), self.build_base)
        cmake = cmake_executable()
        pkg = abspath(self.install_lib)
        log.info("CMake: Installing package to %s" % pkg)
        try:
            # Re-point cmake's install prefix at the real install_lib and
            # run the install target from the build tree.
            chdir(build_dir)
            self.spawn([cmake,
                '-DPYTHON_PKG_DIR=\'%s\'' % pkg,
                '-DPYPACKED=TRUE',
                '..'
            ])
            self.spawn([cmake, '--build', '.', '--target', 'install'])
        finally: chdir(current_cwd)
        try:
            # Run the stock install with running_binary set, forcing egg_info
            # to re-run (have_run reset) against the binary layout.
            prior = getattr(self.distribution, 'running_binary', False)
            self.distribution.running_binary = True
            self.distribution.have_run['egg_info'] = 0
            dInstall.run(self)
        finally: self.distribution.running_binary = prior
class BuildExt(dBuildExt):
    """build_ext stub: the real extension binaries come from the cmake build."""

    def __init__(self, *args, **kwargs):
        dBuildExt.__init__(self, *args, **kwargs)

    def run(self):
        # Intentionally a no-op; cmake already produced the extension.
        pass
class BuildDistEgg(dBuildDistEgg):
    """bdist_egg that forces a cmake build first, flagged as a binary run."""

    def __init__(self, *args, **kwargs):
        dBuildDistEgg.__init__(self, *args, **kwargs)

    def run(self):
        previous = getattr(self.distribution, 'running_binary', False)
        try:
            self.distribution.running_binary = True
            self.run_command('build')
            dBuildDistEgg.run(self)
        finally:
            # Always restore the prior flag, even if the build failed.
            self.distribution.running_binary = previous
class EggInfo(dEggInfo):
    """egg_info that swaps the manifest template depending on whether this is
    a binary (cmake-built) run or a pure source run."""
    def __init__(self, *args, **kwargs):
        dEggInfo.__init__(self, *args, **kwargs)
    def run(self):
        from setuptools.command.egg_info import manifest_maker
        from os import listdir
        # Default to the source manifest; switch to the binary one only when
        # the staging dir is populated AND we are in a binary run.
        which_template = 'MANIFEST.source.in'
        dist = self.distribution
        # Save extension/package settings so they can be restored verbatim.
        old_values = dist.ext_modules, dist.ext_package, \
            dist.packages, dist.package_dir
        if len(listdir(package_dir)) != 0 \
            and getattr(self.distribution, 'running_binary', False):
            which_template = 'MANIFEST.binary.in'
        else:
            # Source run: hide binary artefacts from egg_info.
            dist.ext_modules, dist.ext_package = None, None
            dist.packages, dist.package_dir = None, None
        try:
            # manifest_maker.template is class-level state; patch and restore.
            old_template = manifest_maker.template
            manifest_maker.template = which_template
            dEggInfo.run(self)
        finally:
            manifest_maker.template = old_template
            dist.ext_modules, dist.ext_package = old_values[:2]
            dist.packages, dist.package_dir = old_values[2:]
class SDist(dSDist):
    """sdist that temporarily hides extension/package settings so the source
    tarball is assembled without binary artefacts."""

    def __init__(self, *args, **kwargs):
        dSDist.__init__(self, *args, **kwargs)

    def run(self):
        dist = self.distribution
        saved = (dist.ext_modules, dist.ext_package,
                 dist.packages, dist.package_dir)
        try:
            dist.ext_modules = None
            dist.ext_package = None
            dist.packages = None
            dist.package_dir = None
            dSDist.run(self)
        finally:
            # Restore everything verbatim, even on failure.
            dist.ext_modules, dist.ext_package, \
                dist.packages, dist.package_dir = saved
# Read the long description up front so the file handle is closed promptly
# (the original left an open() handle dangling inside the setup() call).
with open(join(dirname(__file__), 'README'), 'r') as readme_file:
    long_description = readme_file.read()

setup(
    name="bempp",
    version="2.0",
    setup_requires=['numpy', 'pytest'],
    install_requires=['numpy', 'pytest'],
    platforms=['GNU/Linux', 'Unix', 'Mac OS-X'],
    zip_safe=False,
    cmdclass={
        # NOTE(review): SDist is defined above but not registered here
        # ('sdist' key absent) — confirm whether that is intentional.
        'build': Build, 'install': Install,
        'build_ext': BuildExt, 'bdist_egg': BuildDistEgg,
        'egg_info': EggInfo
    },
    author="Tim Betcke",
    author_email="t.betcke@ucl.ac.uk",
    description="BEMP does what it does well",
    license="MIT",
    url="https://github.com/bempp/bempp",
    ext_modules=[Extension('bempp._core', [])],
    ext_package='bempp',
    packages=['bempp', 'PyTrilinos'],
    package_dir={
        'bempp': join(basename(package_dir), 'bempp'),
        'PyTrilinos': join(basename(package_dir), 'PyTrilinos'),
    },
    include_package_data=True,
    keywords="mathology",
    classifiers=[
        # FIX: "0 - Beta" is not a valid trove classifier; beta is "4 - Beta".
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Topic :: Scientific/Engineering',
        'Topic :: Scientific/Engineering :: Mathematics',
    ],
    long_description=long_description
)
| 35.855219
| 88
| 0.596676
| 1,188
| 10,649
| 5.158249
| 0.21633
| 0.023499
| 0.037533
| 0.047324
| 0.351501
| 0.279373
| 0.252774
| 0.217852
| 0.202024
| 0.155679
| 0
| 0.002223
| 0.281811
| 10,649
| 296
| 89
| 35.976351
| 0.799032
| 0.030707
| 0
| 0.314961
| 0
| 0
| 0.138778
| 0.006724
| 0
| 0
| 0
| 0
| 0
| 1
| 0.070866
| false
| 0.003937
| 0.11811
| 0.003937
| 0.23622
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b4985b49cd879340b47fb541710318c2bb259dc
| 26,765
|
py
|
Python
|
demo/rrteAutoLog.py
|
CharlesWangYu/guitest
|
785f234321143a4a3f4afe92376dc4f138489b31
|
[
"BSD-3-Clause"
] | null | null | null |
demo/rrteAutoLog.py
|
CharlesWangYu/guitest
|
785f234321143a4a3f4afe92376dc4f138489b31
|
[
"BSD-3-Clause"
] | null | null | null |
demo/rrteAutoLog.py
|
CharlesWangYu/guitest
|
785f234321143a4a3f4afe92376dc4f138489b31
|
[
"BSD-3-Clause"
] | null | null | null |
import pdb
import logging
import os
import sys
import time
import subprocess
import xlrd
import win32gui
import win32api
import win32con
#import comtypes
from configparser import ConfigParser
from comtypes.client import *
from ctypes import *
def logTreeItem(node):
    """Log a tree node's element and, when present, each of its children."""
    logging.info('----------------------------------------------------------')
    logging.info('Parent\t: %s (%s)' % (node.elem.name, node.elem.ctrlType))
    if node.left:
        # First child, then walk the right-sibling chain.
        logging.info('Child[0]\t: %s (%s)' % (node.left.elem.name, node.left.elem.ctrlType))
        sibling = node.left.right
        index = 1
        while sibling is not None:
            logging.info('Child[%d]\t: %s (%s)' % (index, sibling.elem.name, sibling.elem.ctrlType))
            index += 1
            sibling = sibling.right
class UIA:
    """Namespace wrapping the Windows UIAutomation COM API.

    Loads UIAutomationCore at class-creation time and exposes static helpers
    for locating elements and driving the common control patterns
    (Value, ExpandCollapse, SelectionItem, Invoke, Toggle).
    """
    Client = GetModule('UIAutomationCore.dll')
    IUIA = CreateObject('{ff48dba4-60ef-4201-aa87-54103eef594e}', interface=Client.IUIAutomation)
    DesktopRoot = IUIA.GetRootElement()

    @staticmethod
    def findAllElem(start, key, type, flag=Client.PropertyConditionFlags_None, scope=Client.TreeScope_Descendants):
        """Return all elements under *start* whose property *type* equals *key*."""
        cnd = UIA.IUIA.CreatePropertyConditionEx(type, key, flag)
        all = start.FindAll(scope, cnd)
        return all

    @staticmethod
    def findFirstElem(start, key, type, flag=Client.PropertyConditionFlags_None, scope=Client.TreeScope_Descendants):
        """Return the first element under *start* whose property *type* equals *key*."""
        cnd = UIA.IUIA.CreatePropertyConditionEx(type, key, flag)
        element = start.FindFirst(scope, cnd)
        return element

    @staticmethod
    def findFirstElem2And(start, key1, type1, key2, type2, flag=Client.PropertyConditionFlags_None, scope=Client.TreeScope_Descendants):
        """Return the first element matching BOTH (key1, type1) AND (key2, type2)."""
        cnd1 = UIA.IUIA.CreatePropertyConditionEx(type1, key1, flag)
        cnd2 = UIA.IUIA.CreatePropertyConditionEx(type2, key2, flag)
        combine = UIA.IUIA.CreateAndCondition(cnd1, cnd2)
        element = start.FindFirst(scope, combine)
        return element

    @staticmethod
    def findAllElem2Or(start, key1, type1, key2, type2, flag=Client.PropertyConditionFlags_None, scope=Client.TreeScope_Descendants):
        """Return all elements matching EITHER (key1, type1) OR (key2, type2)."""
        cnd1 = UIA.IUIA.CreatePropertyConditionEx(type1, key1, flag)
        cnd2 = UIA.IUIA.CreatePropertyConditionEx(type2, key2, flag)
        combine = UIA.IUIA.CreateOrCondition(cnd1, cnd2)
        all = start.FindAll(scope, combine)
        return all

    @staticmethod
    def findAllElem4Or(start, key1, key2, key3, key4, type, flag=Client.PropertyConditionFlags_None, scope=Client.TreeScope_Descendants):
        """Return all elements whose property *type* matches any of the four keys."""
        cnd1 = UIA.IUIA.CreatePropertyConditionEx(type, key1, flag)
        cnd2 = UIA.IUIA.CreatePropertyConditionEx(type, key2, flag)
        cnd3 = UIA.IUIA.CreatePropertyConditionEx(type, key3, flag)
        cnd4 = UIA.IUIA.CreatePropertyConditionEx(type, key4, flag)
        combine1 = UIA.IUIA.CreateOrCondition(cnd1, cnd2)
        combine2 = UIA.IUIA.CreateOrCondition(cnd3, cnd4)
        combine = UIA.IUIA.CreateOrCondition(combine1, combine2)
        all = start.FindAll(scope, combine)
        return all

    # FIX: @staticmethod was missing here although every sibling helper has it
    # and callers invoke UIA.getParentElem(...); without the decorator the
    # helper would break if ever called through an instance.
    @staticmethod
    def getParentElem(elem):
        """Return *elem*'s parent in the control view."""
        walker = UIA.IUIA.ControlViewWalker
        parent = walker.GetParentElement(elem)
        return parent

    @staticmethod
    def getNextSiblingElem(elem):
        """Return *elem*'s next sibling in the control view."""
        walker = UIA.IUIA.ControlViewWalker
        element = walker.GetNextSiblingElement(elem)
        return element

    @staticmethod
    def getPreviousSiblingElem(elem):
        """Return *elem*'s previous sibling in the control view."""
        walker = UIA.IUIA.ControlViewWalker
        element = walker.GetPreviousSiblingElement(elem)
        return element

    @staticmethod
    def getElemSubText(elem):
        """Return the name of *elem*'s immediate Text child."""
        text = UIA.findFirstElem(elem, UIA.Client.UIA_TextControlTypeId, UIA.Client.UIA_ControlTypePropertyId, scope=UIA.Client.TreeScope_Children)
        return text.CurrentName

    @staticmethod
    def findElemBySubText(start, name, flag=Client.PropertyConditionFlags_None, scope=Client.TreeScope_Descendants):
        """Find the element owning a descendant whose Name equals *name*.

        NOTE(review): *flag* and *scope* are accepted but not forwarded to the
        inner search — kept for interface compatibility; confirm intent.
        """
        child = UIA.findFirstElem(start, name, UIA.Client.UIA_NamePropertyId)
        element = UIA.getParentElem(child)
        return element

    @staticmethod
    def isUIAElem(elem):
        """True when *elem* is a live UIA element (probing CurrentName succeeds)."""
        try:
            elem.CurrentName
            return True
        except Exception:
            return False

    @staticmethod
    def setEditbox(elem, text):
        """Focus an edit control and set its value via the Value pattern."""
        assert UIA.isUIAElem(elem)
        pattern = elem.GetCurrentPattern(UIA.Client.UIA_ValuePatternId)
        ctrl = cast(pattern, POINTER(UIA.Client.IUIAutomationValuePattern))
        elem.SetFocus()
        ctrl.SetValue(text)

    @staticmethod
    def expandTree(elem):
        """Expand a collapsed tree item (no-op when already expanded)."""
        assert UIA.isUIAElem(elem)
        pattern = elem.GetCurrentPattern(UIA.Client.UIA_ExpandCollapsePatternId)
        ctrl = cast(pattern, POINTER(UIA.Client.IUIAutomationExpandCollapsePattern))
        if ctrl.value.CurrentExpandCollapseState == UIA.Client.ExpandCollapseState_Collapsed:
            # NOTE(review): ExpandCollapsePattern normally exposes Expand();
            # Select() here looks suspect — confirm against the target app.
            ctrl.Select()

    @staticmethod
    def collapseTree(elem):
        """Collapse an expanded tree item (no-op when already collapsed)."""
        assert UIA.isUIAElem(elem)
        pattern = elem.GetCurrentPattern(UIA.Client.UIA_ExpandCollapsePatternId)
        ctrl = cast(pattern, POINTER(UIA.Client.IUIAutomationExpandCollapsePattern))
        if ctrl.value.CurrentExpandCollapseState == UIA.Client.ExpandCollapseState_Expanded:
            # NOTE(review): see expandTree — Select() vs Collapse(); confirm.
            ctrl.Select()

    @staticmethod
    def isLeaf(elem):
        """True when the tree item has no children (LeafNode state)."""
        assert UIA.isUIAElem(elem)
        pattern = elem.GetCurrentPattern(UIA.Client.UIA_ExpandCollapsePatternId)
        ctrl = cast(pattern, POINTER(UIA.Client.IUIAutomationExpandCollapsePattern))
        return ctrl.value.CurrentExpandCollapseState == UIA.Client.ExpandCollapseState_LeafNode

    @staticmethod
    def pushLeaf(elem):
        """Select a tree item, but only when it is a leaf node."""
        assert UIA.isUIAElem(elem)
        pattern1 = elem.GetCurrentPattern(UIA.Client.UIA_ExpandCollapsePatternId)
        ctrl1 = cast(pattern1, POINTER(UIA.Client.IUIAutomationExpandCollapsePattern))
        pattern2 = elem.GetCurrentPattern(UIA.Client.UIA_SelectionItemPatternId)
        ctrl2 = cast(pattern2, POINTER(UIA.Client.IUIAutomationSelectionItemPattern))
        if ctrl1.value.CurrentExpandCollapseState == UIA.Client.ExpandCollapseState_LeafNode:
            ctrl2.Select()

    @staticmethod
    def pushButton(elem):
        """Click a button via the Invoke pattern."""
        assert UIA.isUIAElem(elem)
        pattern = elem.GetCurrentPattern(UIA.Client.UIA_InvokePatternId)
        ctrl = cast(pattern, POINTER(UIA.Client.IUIAutomationInvokePattern))
        ctrl.Invoke()

    @staticmethod
    def selectTab(elem):
        """Select a tab item if it is not already selected."""
        assert UIA.isUIAElem(elem)
        pattern = elem.GetCurrentPattern(UIA.Client.UIA_SelectionItemPatternId)
        ctrl = cast(pattern, POINTER(UIA.Client.IUIAutomationSelectionItemPattern))
        if not ctrl.value.CurrentIsSelected:
            ctrl.Select()

    @staticmethod
    def selectCheckbox(elem):
        """Check a checkbox if it is currently unchecked."""
        assert UIA.isUIAElem(elem)
        pattern = elem.GetCurrentPattern(UIA.Client.UIA_TogglePatternId)
        ctrl = cast(pattern, POINTER(UIA.Client.IUIAutomationTogglePattern))
        if not ctrl.value.CurrentToggleState:
            ctrl.Toggle()

    @staticmethod
    def unselectCheckbox(elem):
        """Uncheck a checkbox if it is currently checked."""
        assert UIA.isUIAElem(elem)
        pattern = elem.GetCurrentPattern(UIA.Client.UIA_TogglePatternId)
        ctrl = cast(pattern, POINTER(UIA.Client.IUIAutomationTogglePattern))
        if ctrl.value.CurrentToggleState:
            ctrl.Toggle()

    @staticmethod
    def setDirInCommonDialog(dialog, path):
        """Type *path* into a Windows common file dialog and confirm it."""
        assert UIA.isUIAElem(dialog)
        edit = UIA.findFirstElem2And(dialog, UIA.Client.UIA_EditControlTypeId, UIA.Client.UIA_ControlTypePropertyId, 'Edit', UIA.Client.UIA_ClassNamePropertyId)
        assert UIA.isUIAElem(edit)
        okBtn = UIA.findFirstElem(dialog, '1', UIA.Client.UIA_AutomationIdPropertyId, scope=UIA.Client.TreeScope_Children)
        assert UIA.isUIAElem(okBtn)
        UIA.setEditbox(edit, path)
        time.sleep(1)
        UIA.pushButton(okBtn)
        time.sleep(1)
class RRTE:
DELAY_RRET_START = 8
DELAY_WAIT_DLG = 3
NAME_RRTE_APP = 'Reference Run-time Environment'
NAME_TRACE_LEVEL = 'Microsoft.Windows.Controls.Ribbon.RibbonGallery Items.Count:1'
NAME_BROWSER_MODEL = 'Fdi.Client.Catalog.DeviceCatalogBrowserModel'
NAME_TOP_TAB = 'Fdi.Client.DeviceUi.ViewModel.DeviceUiHostContainerItemViewModel'
NAME_X_BTN = 'X'
NAME_ONLINE_PARAMS = 'OnlineParameters'
NAME_OFFLINE_TAB = 'Offline root menu'
NAME_ONLINE_TAB = 'Online'
NAME_DEVICE_ROOT_MENU = 'Device root menu'
NAME_DIAGNOSTIC_ROOT_MENU = 'Diagnostic root menu'
NAME_MAINT_ROOT_MENU = 'Maintenance root menu'
NAME_PROCESS_ROOT_MENU = 'Process variables root menu'
NAME_HEALTH_TAB = 'Health'
NAME_TREE_ROOT = 'DD_ExplorerView'
NAME_APPLY_BTN = 'Apply'
NAME_REVERT_BTN = 'Revert'
def __init__(self):
self.config = ConfigParser()
self.config.read('test.conf', encoding='UTF-8')
self.provider = None
self.tree = None
self.RRTERoot = None
self.RRTECurr = None
self.CurrTAB = None
# layer2
self.WorkRoot = None
# layer3
self.TopTAB = None # TAB for special fdi package
self.TopBtnX = None # button on the right side of top TAB
self.TopRoot = None # window to contain tab, menu tree, and param
# layer4
self.Offline = None # offline entry
self.OfflineX = None
self.TABRoot = None # root TABs' parent
# layer5
self.Online = None # online entry
self.OnlineX = None
self.Device = None # device root menu entry
self.DeviceX = None
self.Diagnose = None # diagnostic root menu entry
self.DiagnoseX = None
self.Maintena = None # maintenance root menu entry
self.MaintenaX = None
self.Process = None # process variable root menu entry
self.ProcessX = None
# layer4
self.Health = None
self.Explorer = None
self.TreeRoot = None # update after click root menu button
self.PaneRoot = None # update after click menu or window item
self.Apply = None
self.Revert = None
def start(self):
inputMode = self.config['MISC']['TEST_FILE_TYPE'].strip("'")
hostApp = self.config['MISC']['HOST_APP_PATH'].strip("'") + '\Reference Run-time Environment\Fdi.Reference.Client.exe'
testFile = self.config['MISC']['TEST_FILE'].strip("'")
outPath = self.config['MISC']['OUTPUT_PATH'].strip("'")
logPath = self.config['MISC']['RRTE_LOG_PATH'].strip("'")
execCmd = '\"' + hostApp + '\" -l \"' + testFile + '\"'
#os.system(execCmd)
self.provider = subprocess.Popen(execCmd, shell=True, stdout=subprocess.PIPE, close_fds=True)
#self.provider = subprocess.Popen(execCmd, shell=True, stdout=subprocess.PIPE)
#self.provider = subprocess.Popen(execCmd)
time.sleep(RRTE.DELAY_RRET_START)
logging.info('execCmd = %s' % execCmd)
#print('Please tuning the window size')
#pdb.set_trace()
# find layer1 element
self.RRTERoot = UIA.findFirstElem(UIA.DesktopRoot, RRTE.NAME_RRTE_APP, UIA.Client.UIA_NamePropertyId, scope=UIA.Client.TreeScope_Children)
assert UIA.isUIAElem(self.RRTERoot)
self.RRTECurr = self.RRTERoot
self.getBasicElem()
# create basic root menu node
onlineElem = Top('Online')
onlineElem.ctrlType = 'Top'
processElem = Top('Process variables root menu')
processElem.ctrlType = 'Top'
DiagElem = Top('Diagnostic root menu')
DiagElem.ctrlType = 'Top'
MaintElem = Top('Maintenance root menu')
MaintElem.ctrlType = 'Top'
DevElem = Top('Device root menu')
DevElem.ctrlType = 'Top'
rootNode = TreeNode(onlineElem)
processNode = TreeNode(processElem)
DiagNode = TreeNode(DiagElem)
MaintNode = TreeNode(MaintElem)
DevNode = TreeNode(DevElem)
rootNode.left = processNode
processNode.right = DiagNode
DiagNode.right = MaintNode
MaintNode.right = DevNode
rootNode.parent = None
processNode.parent = rootNode
DiagNode.parent = rootNode
MaintNode.parent = rootNode
DevNode.parent = rootNode
self.tree = Tree(rootNode, rootNode)
time.sleep(2)
def getBasicElem(self):
# find layer2 element(work area)
all = UIA.findAllElem(self.RRTERoot, UIA.Client.UIA_CustomControlTypeId, UIA.Client.UIA_ControlTypePropertyId, scope=UIA.Client.TreeScope_Children)
self.WorkRoot = all.GetElement(all.Length-1)
assert UIA.isUIAElem(self.WorkRoot)
# find layer3 element
self.TopTAB = UIA.findFirstElem(self.WorkRoot, RRTE.NAME_TOP_TAB, UIA.Client.UIA_NamePropertyId)
assert UIA.isUIAElem(self.TopTAB)
self.TopTABX = UIA.findFirstElem(self.TopTAB, RRTE.NAME_X_BTN, UIA.Client.UIA_NamePropertyId)
assert UIA.isUIAElem(self.TopTABX)
self.TopRoot = UIA.getNextSiblingElem(self.TopTABX)
assert UIA.isUIAElem(self.TopRoot)
# find layer4 element
self.Offline = UIA.findElemBySubText(self.TopRoot, RRTE.NAME_OFFLINE_TAB)
assert UIA.isUIAElem(self.Offline)
self.OfflineX = UIA.findFirstElem(self.Offline, RRTE.NAME_X_BTN, UIA.Client.UIA_NamePropertyId)
assert UIA.isUIAElem(self.OfflineX)
self.TABRoot = UIA.findFirstElem(self.TopRoot, RRTE.NAME_ONLINE_PARAMS, UIA.Client.UIA_AutomationIdPropertyId)
assert UIA.isUIAElem(self.TABRoot)
self.Health = UIA.getNextSiblingElem(self.TABRoot)
assert UIA.isUIAElem(self.Health)
self.Explorer = UIA.findFirstElem(self.TopRoot, RRTE.NAME_TREE_ROOT, UIA.Client.UIA_AutomationIdPropertyId)
assert UIA.isUIAElem(self.Explorer)
#self.TreeRoot = UIA.getNextSiblingElem(self.Explorer)
#self.PaneRoot = UIA.getNextSiblingElem(self.TreeRoot)
self.Apply = UIA.findFirstElem(self.TopRoot, RRTE.NAME_APPLY_BTN, UIA.Client.UIA_NamePropertyId)
assert UIA.isUIAElem(self.Apply)
self.Revert = UIA.findFirstElem(self.TopRoot, RRTE.NAME_REVERT_BTN, UIA.Client.UIA_NamePropertyId)
assert UIA.isUIAElem(self.Revert)
# layer5
self.Online = UIA.findElemBySubText(self.TABRoot, RRTE.NAME_ONLINE_TAB)
assert UIA.isUIAElem(self.Online)
self.OnlineX = UIA.findFirstElem(self.Online, RRTE.NAME_X_BTN, UIA.Client.UIA_NamePropertyId)
assert UIA.isUIAElem(self.OnlineX)
self.Device = UIA.findElemBySubText(self.TABRoot, RRTE.NAME_DEVICE_ROOT_MENU)
assert UIA.isUIAElem(self.Device)
self.DeviceX = UIA.findFirstElem(self.Device, RRTE.NAME_X_BTN, UIA.Client.UIA_NamePropertyId)
assert UIA.isUIAElem(self.DeviceX)
self.Diagnose = UIA.findElemBySubText(self.TABRoot, RRTE.NAME_DIAGNOSTIC_ROOT_MENU)
assert UIA.isUIAElem(self.Diagnose)
self.DiagnoseX = UIA.findFirstElem(self.Diagnose, RRTE.NAME_X_BTN, UIA.Client.UIA_NamePropertyId)
assert UIA.isUIAElem(self.DiagnoseX)
self.Maintena = UIA.findElemBySubText(self.TABRoot, RRTE.NAME_MAINT_ROOT_MENU)
assert UIA.isUIAElem(self.Maintena)
self.MaintenaX = UIA.findFirstElem(self.Maintena, RRTE.NAME_X_BTN, UIA.Client.UIA_NamePropertyId)
assert UIA.isUIAElem(self.MaintenaX)
self.Process = UIA.findElemBySubText(self.TABRoot, RRTE.NAME_PROCESS_ROOT_MENU)
assert UIA.isUIAElem(self.Process)
self.ProcessX = UIA.findFirstElem(self.Process, RRTE.NAME_X_BTN, UIA.Client.UIA_NamePropertyId)
assert UIA.isUIAElem(self.ProcessX)
def createNodeTree(self, selectedNode):  # must not be the 'Online' item
    """Recursively expand *selectedNode* into the logical tree.

    Replays the UI selection path from the tree root down to the node,
    asks the node for its children, then recurses into every selectable
    child. ``selectedNode`` may be None (no-op).
    """
    if selectedNode is None:
        return
    assert not selectedNode.elem.name == 'Online'
    assert isinstance(selectedNode.elem, SelectableElement)
    # collect the path from the selected node up to (and excluding) the root
    path = [selectedNode]
    currNode = selectedNode
    while currNode.parent is not None:
        currNode = currNode.parent
        path.append(currNode)
    path.remove(self.tree.root)
    path.reverse()
    # push the button sequence to navigate into the current tree node
    for item in path:
        if isinstance(item.elem, SelectableElement):
            item.elem.select(self)
        if item.isEqual(selectedNode):
            item.setChildren(self)
            logTreeItem(item)
            # iterate first-child / next-sibling chain; guarding against
            # None fixes a crash when the node has no children (the
            # original dereferenced item.left unconditionally)
            child = item.left
            while child is not None:
                if isinstance(child.elem, SelectableElement):
                    self.createNodeTree(child)
                child = child.right
    time.sleep(2)
def loadPackage(self):
    """Load the FDI package via the browser-model tab, then refresh the
    cached top-level UI elements once the UI has rebuilt."""
    browser = UIA.findFirstElem(self.RRTERoot, RRTE.NAME_BROWSER_MODEL, UIA.Client.UIA_NamePropertyId)
    assert UIA.isUIAElem(browser)
    UIA.selectTab(browser)
    time.sleep(2)
    # the load button lives under the last custom child of the browser pane
    all = UIA.findAllElem(browser, UIA.Client.UIA_CustomControlTypeId, UIA.Client.UIA_ControlTypePropertyId, scope=UIA.Client.TreeScope_Children)
    custom = all.GetElement(all.Length-1)
    loadBtn = UIA.findFirstElem(custom, UIA.Client.UIA_ButtonControlTypeId, UIA.Client.UIA_ControlTypePropertyId)
    assert UIA.isUIAElem(loadBtn)
    UIA.pushButton(loadBtn)
    logging.info('Load FDI package')
    time.sleep(2)
    # re-resolve cached anchor elements after the package UI appears
    self.getBasicElem()
    time.sleep(1)
    #self.wait()
def closeMenu(self):
    """Close the currently open top menu by pushing its 'X' button."""
    # find layer2 element(work area) - last custom child of the RRTE root
    all = UIA.findAllElem(self.RRTERoot, UIA.Client.UIA_CustomControlTypeId, UIA.Client.UIA_ControlTypePropertyId, scope=UIA.Client.TreeScope_Children)
    self.WorkRoot = all.GetElement(all.Length-1)
    assert UIA.isUIAElem(self.WorkRoot)
    # find layer3 element: the top tab and its close ('X') button
    self.TopTAB = UIA.findFirstElem(self.WorkRoot, RRTE.NAME_TOP_TAB, UIA.Client.UIA_NamePropertyId)
    assert UIA.isUIAElem(self.TopTAB)
    self.TopTABX = UIA.findFirstElem(self.TopTAB, RRTE.NAME_X_BTN, UIA.Client.UIA_NamePropertyId)
    assert UIA.isUIAElem(self.TopTABX)
    UIA.pushButton(self.TopTABX)
    logging.info('Close Menu')
    time.sleep(4)
    #self.wait()
def close(self):
    """Close the RRTE application by sending Alt+F4 to its main window."""
    '''
    titleBar = UIA.findFirstElem(self.RRTERoot, UIA.Client.UIA_TitleBarControlTypeId, UIA.Client.UIA_ControlTypePropertyId, scope=UIA.Client.TreeScope_Children)
    pdb.set_trace()
    assert UIA.isUIAElem(titleBar)
    closeBtn = UIA.findFirstElem(titleBar, 'Close', UIA.Client.UIA_AutomationIdPropertyId, scope=UIA.Client.TreeScope_Children)
    assert UIA.isUIAElem(closeBtn)
    UIA.pushButton(closeBtn)
    '''
    # bring the RRTE main window to the foreground, then synthesize Alt+F4
    hwnd = win32gui.FindWindow(None, RRTE.NAME_RRTE_APP)
    win32gui.SetForegroundWindow(hwnd)
    win32api.keybd_event(win32con.VK_MENU, 0, 0, 0)
    win32api.keybd_event(win32con.VK_F4, 0, 0, 0)
    win32api.keybd_event(win32con.VK_F4, 0, win32con.KEYEVENTF_KEYUP, 0)
    win32api.keybd_event(win32con.VK_MENU, 0, win32con.KEYEVENTF_KEYUP, 0)
    time.sleep(4)
    #self.provider.terminate()
    #time.sleep(4)
    logging.info('Close RRTE')
def setTraceLevel(self, level):
    """Select the list item whose name matches *level* (e.g. 'Information')."""
    item = UIA.findFirstElem2And(self.RRTERoot, level, UIA.Client.UIA_NamePropertyId, UIA.Client.UIA_ListItemControlTypeId, UIA.Client.UIA_ControlTypePropertyId)
    assert UIA.isUIAElem(item)
    UIA.selectTab(item)
    time.sleep(3)
    logging.info('Set trace level')
def clearRegistLog(self):
    """Delete the register/trace log files under RRTE_LOG_PATH.

    Same `del` commands as before, but built in a loop with an explicit
    escaped backslash — the original literals ('\\FdiContainer.log',
    '\\Trace.log', ...) relied on invalid escape sequences, which emit
    DeprecationWarnings on modern Python.
    """
    logPath = self.config['MISC']['RRTE_LOG_PATH'].strip("'")
    for logName in ('DMS.log', 'FdiContainer.log', 'HARTModemDriver.log',
                    'ReferenceHost.log', 'Trace.log'):
        os.system('del /F /S /Q ' + logPath + '\\' + logName)
    time.sleep(1)
    logging.info('Clear register log files')
def clearOutput(self):
    """Recreate an empty '<OUTPUT_PATH>\\rrte' folder for this run."""
    target = self.config['MISC']['OUTPUT_PATH'].strip("'") + '\\rrte'
    # wipe any previous run's output, then recreate the folder
    os.system('rmdir /S /Q "' + target + '"')
    time.sleep(2)
    os.system('mkdir "' + target + '"')
    time.sleep(2)
    logging.info('Clear already existing output log files')
def traversal(self, targetNode):
    """Pre-order walk of the logical tree; capture a register log for
    every Window node encountered."""
    if targetNode is None:
        return
    if isinstance(targetNode.elem, Window):  # TODO: Page
        self.createRegistLog(targetNode)
    self.traversal(targetNode.left)
    self.traversal(targetNode.right)
def createRegistLog(self, node):
    """Navigate into *node*'s window and copy the register log files the
    visit produced into a per-node folder under the output path."""
    assert not node.elem.name == 'Online'
    assert isinstance(node.elem, Window)
    outPath = self.config['MISC']['OUTPUT_PATH'].strip("'")
    logPath = self.config['MISC']['RRTE_LOG_PATH'].strip("'")
    # get path: walk from the node up to the root, then drop the root
    path = []
    path.append(node)
    currNode = node
    while not currNode.parent == None:
        currNode = currNode.parent
        path.append(currNode)
    path.remove(path[len(path)-1])
    path.reverse()
    # start RRTE and go into target window item
    #self.start()
    self.loadPackage()
    self.setTraceLevel('Information') # Verbose
    pathName = outPath + '\\rrte'
    # select each element on the path, mirroring the hierarchy on disk
    for item in path:
        item.elem.select(self)
        pathName += '\\' + item.elem.name
        execCmd = 'mkdir "' + pathName + '"'
        os.system(execCmd)
    time.sleep(3) # wait question mark dispear (create log)
    #self.close()
    self.closeMenu()
    # make folder and copy log files
    execCmd = 'copy "' + logPath + '\\*.*" "' + pathName + '"'
    os.system(execCmd)
    time.sleep(1)
    self.clearRegistLog()
def wait(self, dlgFindType=UIA.Client.UIA_ControlTypePropertyId, dlgFindKey=UIA.Client.UIA_WindowControlTypeId):
    """Poll until a dialog matching (dlgFindKey, dlgFindType) appears
    under the RRTE root, then return it."""
    while True:
        dialog = UIA.findFirstElem(self.RRTERoot, dlgFindKey, dlgFindType)
        if UIA.isUIAElem(dialog):
            return dialog
        time.sleep(RRTE.DELAY_WAIT_DLG)
class TreeNode:
    """Node of the logical menu tree, stored left-child/next-right-sibling.

    ``parent`` is the logical parent in the menu tree (not a binary-tree
    parent); ``left`` is the first child and ``right`` the next sibling.
    """
    def __init__(self, elem, parent=None, left=None, right=None):
        self.elem = elem
        self.parent = parent  # logical parent in the tree, not in the binary tree
        self.left = left
        self.right = right

    def setChildren(self, rrte):
        """Ask self.elem for its children and link them under this node.

        Tolerates an empty or None child list (the original crashed on
        None with a TypeError from len()).
        """
        elems = self.elem.children(rrte)
        if elems:
            self.left = TreeNode(elems[0], self)
            currNode = self.left
            for x in range(1, len(elems)):
                currNode.right = TreeNode(elems[x], self)
                currNode = currNode.right

    def isEqual(self, ref):
        """Compare nodes by walking both parent chains and matching
        element names level by level.

        Fixed: the original dereferenced a None node when the chains had
        different depths; such nodes now compare unequal instead.
        """
        node1 = self
        node2 = ref
        while not (node1 is None and node2 is None):
            if node1 is None or node2 is None:
                return False  # chains of different depth are never equal
            if node1.elem.name != node2.elem.name:
                return False
            node1 = node1.parent
            node2 = node2.parent
        return True
class Tree:  # logical tree, not the control-view tree in UI Automation
    """Container for the logical menu tree.

    Holds only the root and the currently selected node; child expansion
    and traversal live on TreeNode and the RRTE driver. (A large block of
    commented-out, half-finished addChild/preorderScreen code was removed.)
    """
    def __init__(self, root=None, curr=None):
        self.root = root
        self.curr = curr
class Element:  # abstract class
    """Abstract base for every RRTE UI element.

    Stores the display name plus control metadata (type string and
    bounding rectangle) that the tree builders fill in later.
    """
    def __init__(self, name):
        self.name = name
        # populated by the callers that discover this element via UIA
        self.ctrlType = None
        self.rectangle = None

    def children(self, rrte):
        """Return this element's child elements; overridden by subclasses."""
        pass
# RRTE element class
class RRTEElement(Element): # abstract class (not used in demo)
    """Placeholder base for RRTE-specific elements; currently unused."""
    pass
class SelectableElement(Element): # abstract class
    """Element that can be activated in the UI; subclasses implement select()."""
    def select(self, rrte):
        pass
class Top(SelectableElement):
    """Top-level tab element; selecting it re-anchors the explorer tree.

    Cleanup: locals no longer shadow the builtins ``all`` and ``set``.
    """
    def select(self, rrte):
        btn = UIA.findElemBySubText(rrte.TABRoot, self.name)
        UIA.pushButton(btn)
        time.sleep(4)
        # the UI rebuilds after the push, so re-find the tree anchors
        rrte.Explorer = UIA.findFirstElem(rrte.TopRoot, RRTE.NAME_TREE_ROOT, UIA.Client.UIA_AutomationIdPropertyId)
        rrte.TreeRoot = UIA.getNextSiblingElem(rrte.Explorer)
        rrte.RRTECurr = rrte.TreeRoot

    def children(self, rrte):
        """Return the direct tree items below the current anchor; leaves
        become Window elements, inner nodes become Menu elements."""
        found = UIA.findAllElem(rrte.RRTECurr, UIA.Client.UIA_TreeItemControlTypeId, UIA.Client.UIA_ControlTypePropertyId, scope=UIA.Client.TreeScope_Children)
        elems = []
        for idx in range(found.Length):
            item = found.GetElement(idx)
            name = UIA.getElemSubText(item)
            if UIA.isLeaf(item):
                elem = Window(name)
            else:
                elem = Menu(name)
            elem.ctrlType = 'TreeItem'
            elem.rectangle = item.CurrentBoundingRectangle
            elems.append(elem)
        return elems
class Menu(SelectableElement):
    """Expandable (non-leaf) tree item in the explorer.

    Bug fix: children() called the bare name ``findAllElem`` instead of
    ``UIA.findAllElem``, which raised NameError at runtime. Locals also no
    longer shadow the builtins ``all`` and ``set``.
    """
    def select(self, rrte):
        tree = UIA.findElemBySubText(rrte.RRTECurr, self.name)
        UIA.expandTree(tree)
        time.sleep(2)
        # re-anchor cached elements after the expansion rebuilds the UI
        rrte.Explorer = UIA.findFirstElem(rrte.TopRoot, RRTE.NAME_TREE_ROOT, UIA.Client.UIA_AutomationIdPropertyId)
        rrte.TreeRoot = UIA.getNextSiblingElem(rrte.Explorer)
        rrte.PaneRoot = UIA.getNextSiblingElem(rrte.TreeRoot)
        rrte.RRTECurr = tree

    def children(self, rrte):
        """Return the direct child tree items, all wrapped as Window elements."""
        found = UIA.findAllElem(rrte.RRTECurr, UIA.Client.UIA_TreeItemControlTypeId, UIA.Client.UIA_ControlTypePropertyId, scope=UIA.Client.TreeScope_Children)
        elems = []
        for idx in range(found.Length):
            item = found.GetElement(idx)
            elem = Window(UIA.getElemSubText(item))
            elem.ctrlType = 'TreeItem'
            elem.rectangle = item.CurrentBoundingRectangle
            elems.append(elem)
        return elems
class Window(SelectableElement):
    """Leaf tree item whose selection opens a parameter pane.

    Fixes: createParam previously fell through and returned None when the
    'Value' edit box had an unexpected control type, crashing the caller;
    locals no longer shadow the builtins ``all`` and ``set``.
    """
    def select(self, rrte):
        leaf = UIA.findElemBySubText(rrte.RRTECurr, self.name)
        UIA.pushLeaf(leaf)
        time.sleep(2)
        # re-anchor cached elements after the pane rebuilds
        rrte.Explorer = UIA.findFirstElem(rrte.TopRoot, RRTE.NAME_TREE_ROOT, UIA.Client.UIA_AutomationIdPropertyId)
        rrte.TreeRoot = UIA.getNextSiblingElem(rrte.Explorer)
        rrte.PaneRoot = UIA.getNextSiblingElem(rrte.TreeRoot)
        rrte.RRTECurr = rrte.PaneRoot

    def children(self, rrte):
        """Return pane children: parameters (custom), methods (button),
        groups and tab pages."""
        found = UIA.findAllElem4Or(rrte.RRTECurr, UIA.Client.UIA_CustomControlTypeId, UIA.Client.UIA_ButtonControlTypeId, UIA.Client.UIA_GroupControlTypeId, UIA.Client.UIA_TabControlTypeId, UIA.Client.UIA_ControlTypePropertyId, scope=UIA.Client.TreeScope_Children)
        elems = []
        for idx in range(found.Length):
            item = found.GetElement(idx)
            if item.CurrentControlType == UIA.Client.UIA_CustomControlTypeId:  # variable
                elem = self.createParam(item)
                elem.ctrlType = 'Custom'
            elif item.CurrentControlType == UIA.Client.UIA_ButtonControlTypeId:  # method
                elem = Method(UIA.getElemSubText(item))
                elem.ctrlType = 'Button'
            elif item.CurrentControlType == UIA.Client.UIA_GroupControlTypeId:  # group
                elem = Group(UIA.getElemSubText(item))
                elem.ctrlType = 'Group'
            elif item.CurrentControlType == UIA.Client.UIA_TabControlTypeId:  # tab
                elem = Page('')
                elem.ctrlType = 'Tab'
            elem.rectangle = item.CurrentBoundingRectangle
            elems.append(elem)
        return elems

    def createParam(self, uiaElem):
        """Classify a custom control into BitEnum / Data / Enum by probing
        its 'Value' child control."""
        editbox = UIA.findFirstElem(uiaElem, 'Value', UIA.Client.UIA_AutomationIdPropertyId, scope=UIA.Client.TreeScope_Children)
        if not UIA.isUIAElem(editbox):
            # no editable value: a group of bits is expected instead
            group = UIA.findFirstElem(uiaElem, UIA.Client.UIA_GroupControlTypeId, UIA.Client.UIA_ControlTypePropertyId, scope=UIA.Client.TreeScope_Children)
            return BitEnum(UIA.getElemSubText(group))
        if editbox.CurrentControlType == UIA.Client.UIA_EditControlTypeId:
            return Data(UIA.getElemSubText(uiaElem))
        if editbox.CurrentControlType == UIA.Client.UIA_ComboBoxControlTypeId:
            return Enum(UIA.getElemSubText(uiaElem))
        # unexpected control type: fall back to a plain Data parameter
        # (the original implicitly returned None here and the caller crashed)
        return Data(UIA.getElemSubText(uiaElem))
class Page(SelectableElement):
    """Tab page inside a window pane; selecting it activates the tab and
    re-resolves the cached explorer/tree/pane anchors."""
    def select(self, rrte):
        tab = UIA.findElemBySubText(rrte.PaneRoot, self.name)
        UIA.selectTab(tab)
        time.sleep(3)
        # the UI may rebuild after a tab switch, so re-find the anchors
        rrte.Explorer = UIA.findFirstElem(rrte.TopRoot, RRTE.NAME_TREE_ROOT, UIA.Client.UIA_AutomationIdPropertyId)
        rrte.TreeRoot = UIA.getNextSiblingElem(rrte.Explorer)
        rrte.PaneRoot = UIA.getNextSiblingElem(rrte.TreeRoot)
        rrte.RRTECurr = tab
class Group(Element):
    """Container element for grouped parameters; no behavior beyond Element."""
    pass
class Param(Element):  # abstract class
    """Base class for device parameters: a label plus access metadata."""
    def __init__(self, label, mode='RO', edit='None', unit=''):
        Element.__init__(self, label)
        # access mode, edit behaviour and display unit of the parameter
        self.mode, self.edit, self.unit = mode, edit, unit
class Method(Element):
    """A device method, rendered as a button."""
    pass
class Data(Param):
    """A plain value parameter (backed by an edit box in createParam)."""
    pass
class Enum(Param):
    """An enumerated parameter (backed by a combo box in createParam)."""
    pass
class BitEnum(Param):
    """A bit-enumeration parameter (backed by a group control in createParam)."""
    pass
if __name__ == '__main__':
    #pdb.set_trace()
    logging.basicConfig(level = logging.INFO)
    # get hart register log from RRTE
    rrte = RRTE()
    rrte.clearOutput()
    rrte.start()
    # build the logical node tree below the root (the 'Online' item is excluded)
    rrte.createNodeTree(rrte.tree.root.left)
    #pdb.set_trace()
    rrte.closeMenu()
    #rrte.close()
    rrte.clearRegistLog()
    # walk the tree and capture register logs for every window node
    rrte.traversal(rrte.tree.root.left)
    #rrte.close()
| 37.173611
| 256
| 0.74986
| 3,325
| 26,765
| 5.956692
| 0.133835
| 0.044986
| 0.043017
| 0.027769
| 0.48662
| 0.44431
| 0.382662
| 0.337675
| 0.321165
| 0.303241
| 0
| 0.007842
| 0.132897
| 26,765
| 719
| 257
| 37.225313
| 0.84557
| 0.092023
| 0
| 0.318966
| 0
| 0
| 0.053634
| 0.012092
| 0
| 0
| 0
| 0.001391
| 0.075862
| 1
| 0.087931
| false
| 0.013793
| 0.022414
| 0
| 0.215517
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b4a434127e56ce9f10a860ea190256cba7c7ff6
| 2,592
|
py
|
Python
|
project-euler/261/euler_261_v3.py
|
zoffixznet/project-euler
|
39921379385ae2521354c7266a541c46785e85a2
|
[
"MIT"
] | null | null | null |
project-euler/261/euler_261_v3.py
|
zoffixznet/project-euler
|
39921379385ae2521354c7266a541c46785e85a2
|
[
"MIT"
] | null | null | null |
project-euler/261/euler_261_v3.py
|
zoffixznet/project-euler
|
39921379385ae2521354c7266a541c46785e85a2
|
[
"MIT"
] | null | null | null |
# The Expat License
#
# Copyright (c) 2017, Shlomi Fish
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
# Python 2/3 compatibility: on Python 3 alias the removed builtins so the
# code below can keep using long/xrange unconditionally.
if sys.version_info > (3,):
    long = int
    xrange = range
# upper bound for the pivot search (10^10)
LIM = 10000000000
def find_pivots():
    """Search for square-pivot identities up to LIM (Project Euler 261).

    All running quantities (sums of squares and their first/second
    differences) are maintained incrementally so no multiplication is
    needed inside the loops:
      s_k  - sum of the last squares ending at k (updated via d);
      ksn  - smallest sum-of-squares window >= s_k, advanced via dksn;
      s_n / sm / nsq with deltas dm / dnsq - the sliding comparison window.
    NOTE(review): the exact invariants are inferred from the update
    pattern; confirm against the problem's derivation before modifying.
    """
    initial_k = 2
    s_k = 1*1+2*2          # 1^2 + 2^2
    kdk = 1
    kddk = 1
    d = 8
    STEP = 10000000        # progress report interval
    c = STEP
    ldm = (((initial_k-1) << 1) - 1)
    ksn = 2*2
    dksn = 3
    for k in xrange(initial_k, LIM+1):
        if k == c:
            # periodic progress output
            print("Reached %d" % k)
            sys.stdout.flush()
            c += STEP
        # advance the candidate window until its sum reaches s_k
        while ksn < s_k:
            dksn += 2
            ksn += dksn
        sm = s_n = nsq = ksn
        ldm += 2
        dm = dnsq = dksn
        dnsq += 2
        ss_k = s_k
        dk = kdk
        kddk += 2
        ddk = kddk
        kdk += kddk
        for m in xrange(2, k+1):
            # shrink the window from the left while its sum is too large
            while s_n > ss_k and dm > ldm:
                sm -= dm
                dm -= 2
                s_n += sm - nsq
                dnsq -= 2
                nsq -= dnsq
            if s_n == ss_k:
                # window sums match: report the identity
                print("Found %d" % k)
                print(">>> S[ %d .. %d ; %d] = S[ %d .. %d ; %d] ( %d )" %
                      (k-m+1, k, m, ((dm + 1) >> 1), ((dnsq - 1) >> 1), m-1,
                       ((dm + 1) >> 1) - k))
                sys.stdout.flush()
            if dm <= ldm:
                break
            # extend both windows by one term via the difference trackers
            ddk -= 2
            dk -= ddk
            ss_k += dk
            nsq += dnsq
            dnsq += 2
            s_n += nsq
        s_k += d
        d += 4
def main():
    """Entry point: run the pivot search."""
    find_pivots()
if __name__ == "__main__":
    main()
| 28.483516
| 79
| 0.531636
| 366
| 2,592
| 3.691257
| 0.396175
| 0.065137
| 0.006662
| 0.022206
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035431
| 0.368441
| 2,592
| 90
| 80
| 28.8
| 0.78986
| 0.412809
| 0
| 0.066667
| 0
| 0.016667
| 0.049333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033333
| false
| 0
| 0.016667
| 0
| 0.05
| 0.05
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b4addb7383ad54fbd74f3fafd29a6bba825287f
| 4,331
|
py
|
Python
|
Naive_Bayes/lib/naive_bayes.py
|
DavexPro/MLStudy
|
869ab4e569fe1cde1a3a6238977282b81a71fc81
|
[
"MIT"
] | null | null | null |
Naive_Bayes/lib/naive_bayes.py
|
DavexPro/MLStudy
|
869ab4e569fe1cde1a3a6238977282b81a71fc81
|
[
"MIT"
] | null | null | null |
Naive_Bayes/lib/naive_bayes.py
|
DavexPro/MLStudy
|
869ab4e569fe1cde1a3a6238977282b81a71fc81
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# coding=utf-8
# -*- utf8 -*-
# author=dave.fang@outlook.com
# create=20170330
import math
from lib.log import LOGGER
class NaiveBayes:
    """Gaussian naive Bayes classifier for the gender sample data set.

    Rows are ``[gender, height, weight, foot]``; class '0' is female and
    '1' male (as used by classify()). Fix: import_data now closes the
    input file via a context manager (the original leaked the handle).
    Comments were translated from Chinese; runtime log strings are kept
    unchanged.
    """
    def __init__(self):
        # raw sample rows
        self.data_set = []
        # samples grouped by gender label
        self.separate_set = {}
        # prior probability P(gender) per label
        self.separate_prior = {}
        # per-label attribute value lists (height / weight / foot)
        self.separate_probability = {}

    def import_data(self, file_path):
        """Load whitespace-separated sample rows from *file_path*.

        Blank lines and lines starting with '#' are skipped.
        :param file_path: path of the sample file
        """
        LOGGER.info('样例数据导入路径: {0}'.format(file_path))
        # context manager guarantees the handle is closed (originally leaked)
        with open(file_path) as file_handle:
            file_content = file_handle.read().split('\n')
        for line in file_content:
            if line.strip() == '' or line[0] == '#':
                continue
            tmp_set = line.split(' ')
            self.data_set.append(tmp_set)
        LOGGER.info('样例数据集导入完成...')

    def separate_data(self):
        """Group samples by gender and compute the prior P(gender)."""
        LOGGER.info('开始分类样例数据...')
        # group the sample rows by their gender label (column 0)
        for one_set in self.data_set:
            if one_set[0] not in self.separate_set:
                self.separate_set[one_set[0]] = []
                self.separate_probability[one_set[0]] = []
                self.separate_prior[one_set[0]] = 0
            self.separate_set[one_set[0]].append(one_set)
        # prior probability P(gender) = class frequency / total samples
        for one_prior in self.separate_prior:
            self.separate_prior[one_prior] = len(self.separate_set[one_prior]) / len(self.data_set)
        LOGGER.info('样例数据分类完成...')

    def analyse_data(self):
        """Reshape grouped samples into per-attribute float lists so the
        mean and standard deviation can be computed later."""
        for one_separate in self.separate_set:
            self.separate_probability[one_separate] = {
                'height': [],
                'weight': [],
                'foot': []
            }
            for one_set in self.separate_set[one_separate]:
                self.separate_probability[one_separate]['height'].append(float(one_set[1]))
                self.separate_probability[one_separate]['weight'].append(float(one_set[2]))
                self.separate_probability[one_separate]['foot'].append(float(one_set[3]))

    def classify(self, height, weight, foot):
        """Classify a sample and log the predicted gender with its odds.

        :param height: height in feet
        :param weight: weight in pounds
        :param foot: foot size in inches
        """
        LOGGER.info('数据分类: 身高{0}英尺 / 体重{1}磅 / 脚掌{2}英寸'.format(height, weight, foot))
        category = {}
        for one_separate in self.separate_set:
            # Gaussian density of the given height under this class
            pro_height = calc_probability(height, calc_mean(self.separate_probability[one_separate]['height']),
                                          calc_stdev(self.separate_probability[one_separate]['height']))
            # Gaussian density of the given weight under this class
            pro_weight = calc_probability(weight, calc_mean(self.separate_probability[one_separate]['weight']),
                                          calc_stdev(self.separate_probability[one_separate]['weight']))
            # Gaussian density of the given foot size under this class
            pro_foot = calc_probability(foot, calc_mean(self.separate_probability[one_separate]['foot']),
                                        calc_stdev(self.separate_probability[one_separate]['foot']))
            category[one_separate] = self.separate_prior[one_separate] * pro_height * pro_weight * pro_foot
        # compare the two posteriors and report the more likely gender
        if category['0'] / category['1'] > 1:
            LOGGER.info('女性 / 女性的概率比男性的概率高{0}倍'.format(round(category['0'] / category['1'])))
        else:
            LOGGER.info('男性 / 男性的概率比女性的概率高{0}倍'.format(round(category['1'] / category['0'])))
def calc_mean(numbers):
    """Return the arithmetic mean of *numbers* as a float."""
    total = float(sum(numbers))
    return total / len(numbers)
def calc_stdev(numbers):
    """Return the sample standard deviation of *numbers*.

    Uses the (n-1) denominator, matching the original implementation.
    """
    mean = calc_mean(numbers)
    squared_devs = [pow(value - mean, 2) for value in numbers]
    variance = sum(squared_devs) / float(len(numbers) - 1)
    return math.sqrt(variance)
def calc_probability(x, mean, stdev):
    """Return the Gaussian probability density of *x* for N(mean, stdev^2)."""
    deviation = x - mean
    exponent = math.exp(-(math.pow(deviation, 2) / (2 * math.pow(stdev, 2))))
    normalizer = math.sqrt(2 * math.pi) * stdev
    return exponent / normalizer
| 31.384058
| 111
| 0.563842
| 487
| 4,331
| 4.821355
| 0.258727
| 0.127768
| 0.117547
| 0.121806
| 0.285775
| 0.241908
| 0.135009
| 0
| 0
| 0
| 0
| 0.013563
| 0.302009
| 4,331
| 137
| 112
| 31.613139
| 0.763149
| 0.126991
| 0
| 0.03125
| 0
| 0
| 0.055194
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.046875
| 0
| 0.234375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b4d709976efa66a27e01fd0d18980bfda0f6d63
| 1,004
|
py
|
Python
|
views/sprite_views/combo_attack_sprite.py
|
kuyu12/pygame_fight_game
|
3bbc286b9f33c6d6d9db9bea21f9b7af15247df5
|
[
"MIT"
] | 1
|
2020-08-03T07:54:59.000Z
|
2020-08-03T07:54:59.000Z
|
views/sprite_views/combo_attack_sprite.py
|
kuyu12/pygame_fight_game
|
3bbc286b9f33c6d6d9db9bea21f9b7af15247df5
|
[
"MIT"
] | null | null | null |
views/sprite_views/combo_attack_sprite.py
|
kuyu12/pygame_fight_game
|
3bbc286b9f33c6d6d9db9bea21f9b7af15247df5
|
[
"MIT"
] | null | null | null |
from views.sprite_views.attack_sprite import AttackSprite, AttackState
from views.sprite_views.movement_sprite import Direction, State
class ComboAttackSprite(AttackSprite):
    """Sprite that plays a combo attack spawned from a player.

    The sprite is positioned at the player's location shifted by the
    combo's offset and moves horizontally in the player's facing
    direction; *on_finish* is invoked when the dead animation completes.
    """
    def __init__(self, player_data, combo_attack, on_finish):
        # combo metadata and frames are set before super().__init__ —
        # presumably the base constructor triggers load_images(); TODO confirm
        self.combo_attack = combo_attack
        self.combo_images = player_data.get_combo_images(combo_attack.combo_type)
        self.attack_state = AttackState.COMBO
        # spawn location: player position shifted by the combo's offset
        loc = (player_data.location[0] + combo_attack.off_set[0],player_data.location[1] + combo_attack.off_set[1])
        super().__init__(loc, player_data.bounds_size)
        self.directions[player_data.faceDirection] = True
        self.is_blocking_move = True
        self.finish_block_state = State.DEAD
        # horizontal movement follows the player's facing direction
        if player_data.faceDirection == Direction.RIGHT:
            self.move_x = combo_attack.move_x
        else:
            self.move_x = -combo_attack.move_x
        self.on_dead_animation_finish = on_finish

    def load_images(self):
        # frames were already fetched from player_data in __init__
        self.images = self.combo_images
| 37.185185
| 115
| 0.721116
| 132
| 1,004
| 5.106061
| 0.348485
| 0.130564
| 0.04451
| 0.059347
| 0.074184
| 0.074184
| 0.074184
| 0
| 0
| 0
| 0
| 0.004963
| 0.197211
| 1,004
| 26
| 116
| 38.615385
| 0.831266
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0
| 0.105263
| 0
| 0.263158
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b4fcde6493e496f8266585c418f51bc87980875
| 6,457
|
py
|
Python
|
payctl/utils.py
|
arjanz/substrate-payctl
|
57d13155ffd8e200d54ec4d4382a09b757fa8211
|
[
"MIT"
] | null | null | null |
payctl/utils.py
|
arjanz/substrate-payctl
|
57d13155ffd8e200d54ec4d4382a09b757fa8211
|
[
"MIT"
] | null | null | null |
payctl/utils.py
|
arjanz/substrate-payctl
|
57d13155ffd8e200d54ec4d4382a09b757fa8211
|
[
"MIT"
] | null | null | null |
from substrateinterface import SubstrateInterface, Keypair
from substrateinterface.utils.ss58 import ss58_encode, ss58_decode
#
# get_config - Get a default and validator specific config elements from args and config.
#
def get_config(args, config, key, section='Defaults'):
    """Resolve a config value with precedence:
    command-line argument > per-validator section > [Defaults]."""
    cli_value = vars(args)[key]
    if cli_value is not None:
        return cli_value
    section_value = config[section].get(key)
    if section_value is not None:
        return section_value
    return config['Defaults'].get(key)
#
# get_eras_rewards_point - Collect the ErasRewardPoints (total and invididual) for a given range of eras.
#
def get_eras_rewards_point(substrate, start, end):
    """Collect ErasRewardPoints (total and individual) for eras [start, end).

    Fixes: the bare ``except:`` is narrowed, and an era is only stored
    after it parses completely — the original could leave a partially
    filled entry in the result when parsing failed halfway through.
    """
    eras_rewards_point = {}
    for era in range(start, end):
        reward_points = substrate.query(
            module='Staking',
            storage_function='ErasRewardPoints',
            params=[era]
        )
        try:
            individual = {}
            for validator_id, points in reward_points.value['individual']:
                individual[validator_id] = points
            # assign only after the whole era parsed successfully
            eras_rewards_point[era] = {
                'total': reward_points.value['total'],
                'individual': individual,
            }
        except (TypeError, KeyError, ValueError, AttributeError):
            # era not present / malformed storage value: skip it
            continue
    return eras_rewards_point
#
# get_eras_validator_rewards - Collect the ErasValidatorReward for a given range of eras.
#
def get_eras_validator_rewards(substrate, start, end):
    """Collect the ErasValidatorReward value for eras [start, end).

    Fix: the bare ``except:`` is narrowed to AttributeError (a query
    result without a ``value`` attribute), so real errors propagate.
    """
    eras_validator_rewards = {}
    for era in range(start, end):
        validator_rewards = substrate.query(
            module='Staking',
            storage_function='ErasValidatorReward',
            params=[era]
        )
        try:
            eras_validator_rewards[era] = validator_rewards.value
        except AttributeError:
            # query returned no usable result for this era: skip it
            continue
    return eras_validator_rewards
#
# get_eras_payment_info - Combine information from ErasRewardPoints and ErasValidatorReward for given
# range of eras to repor the amount of per validator instead of era points.
#
def get_eras_payment_info(substrate, start, end):
    """Combine ErasRewardPoints and ErasValidatorReward for [start, end),
    converting each validator's era points into a reward amount."""
    points_by_era = get_eras_rewards_point(substrate, start, end)
    rewards_by_era = get_eras_validator_rewards(substrate, start, end)
    eras_payment_info = {}
    # only eras present in both result sets can be combined
    for era in set(points_by_era) & set(rewards_by_era):
        total_points = points_by_era[era]['total']
        total_reward = rewards_by_era[era]
        individual = points_by_era[era]['individual']
        for validator_id in individual:
            if total_reward is not None:
                # scale points into the validator's share of the payout
                individual[validator_id] *= (total_reward / total_points)
        eras_payment_info[era] = individual
    return eras_payment_info
#
# get_eras_payment_info_filtered - Similar than get_eras_payment_info but applying some filters;
# 1 . Include only eras containing given acconts.
# 2 . Include only eras containing unclaimed rewards.
#
# NOTE: The returned structure is slighly different than
# get_eras_payment_info
#
def get_eras_payment_info_filtered(substrate, start, end, accounts=[], unclaimed=False):
    """Like get_eras_payment_info, but restricted to *accounts* and, when
    *unclaimed* is True, to eras whose reward is still unclaimed.

    NOTE: the returned structure differs from get_eras_payment_info —
    each entry carries 'claimed' and 'amount' keys.
    """
    filtered = {}
    payment_info = get_eras_payment_info(substrate, start, end)
    ledgers = get_accounts_ledger(substrate, accounts)
    for era in payment_info:
        for account_id in accounts:
            if account_id not in payment_info[era]:
                continue
            claimed = era in ledgers[account_id]['claimedRewards']
            if claimed and unclaimed:
                # caller only wants unclaimed rewards
                continue
            era_entry = filtered.setdefault(era, {})
            # convert planck-style units into whole tokens
            amount = payment_info[era][account_id] / (10 ** substrate.token_decimals)
            era_entry[account_id] = {'claimed': claimed, 'amount': amount}
    return filtered
#
# get_included_accounts - Get the list (for the filtering) of included accounts from the args and config.
#
def get_included_accounts(substrate, args, config):
    """Return the accounts to include: CLI validators if given, otherwise
    every config section except [Defaults]."""
    if args.validators:
        return list(args.validators)
    # no explicit validators: every non-Defaults section is an account
    return [section for section in config.sections() if section != 'Defaults']
#
# get_accounts_ledger - Collect the Ledger for a given list of accounts.
#
def get_accounts_ledger(substrate, accounts):
    """Collect the staking Ledger for every account in *accounts*.

    Bug fix: the Bonded query used ``accounts[0]`` instead of the loop
    variable, so every account silently received the FIRST account's
    controller (and ledger). The bare ``except:`` is also narrowed.
    """
    accounts_ledger = {}
    for account in accounts:
        try:
            # resolve the stash account's controller first
            controller_account = substrate.query(
                module='Staking',
                storage_function='Bonded',
                params=[account]
            )
            ledger = substrate.query(
                module='Staking',
                storage_function='Ledger',
                params=[controller_account.value]
            )
            accounts_ledger[account] = ledger.value
        except AttributeError:
            # no bond/ledger found for this account: skip it
            continue
    return accounts_ledger
#
# get_keypair - Generate a Keypair from args and config.
#
def get_keypair(args, config):
    """Build a Keypair from the single signing method configured in
    [Defaults]; return None unless exactly one method is present."""
    signing_keys = {'signingseed', 'signingmnemonic', 'signinguri'}
    enabled = config['Defaults'].keys() & signing_keys
    if len(enabled) != 1:
        return None
    method = next(iter(enabled))
    secret = config['Defaults'].get(method)
    if method == 'signingseed':
        keypair = Keypair.create_from_seed(secret)
    if method == 'signingmnemonic':
        keypair = Keypair.create_from_mnemonic(secret)
    if method == 'signinguri':
        keypair = Keypair.create_from_uri(secret)
    return keypair
#
# get_nonce - Get the next nonce to be used on a signature for a given account.
#
def get_nonce(substrate, account):
    """Return the next nonce for *account* from System.Account storage."""
    account_info = substrate.query(
        module='System',
        storage_function='Account',
        params=[account]
    )
    return account_info.value['nonce']
| 32.447236
| 106
| 0.647205
| 721
| 6,457
| 5.545076
| 0.183079
| 0.041271
| 0.06003
| 0.038019
| 0.32066
| 0.198349
| 0.110555
| 0.053527
| 0.015008
| 0
| 0
| 0.003381
| 0.266997
| 6,457
| 198
| 107
| 32.611111
| 0.841327
| 0.18507
| 0
| 0.177966
| 0
| 0
| 0.068669
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076271
| false
| 0
| 0.016949
| 0
| 0.194915
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b52940acbc894f83db7c38174cba4aed0fe37b6
| 7,279
|
py
|
Python
|
Notepad++/programfiles/Notepad++/plugins/PythonScript/scripts/Samples/LogfileLexer.py
|
slim71/Utils
|
5fcb0ec604cc039668f132c102d6bd2050bcea5c
|
[
"Unlicense"
] | null | null | null |
Notepad++/programfiles/Notepad++/plugins/PythonScript/scripts/Samples/LogfileLexer.py
|
slim71/Utils
|
5fcb0ec604cc039668f132c102d6bd2050bcea5c
|
[
"Unlicense"
] | null | null | null |
Notepad++/programfiles/Notepad++/plugins/PythonScript/scripts/Samples/LogfileLexer.py
|
slim71/Utils
|
5fcb0ec604cc039668f132c102d6bd2050bcea5c
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
LogfileLexer - Demo
Highlights whole lines which matches the regex.
Usage:
Load a logfile, modify error and warning regex and
run script to see how it works
Note: By commenting out or deleting everything between
the <comment_or_delete> tags (inclusive), this script
is ready to be used as an additional lexer
from Npp import editor, notepad, LEXER, SCINTILLANOTIFICATION, NOTIFICATION, LANGTYPE
import re
try:
# on first run this will generate an NameError exception
LOGFILE_LEXER().main()
except NameError:
class LOGFILE_LEXER_SINGLETON(type):
    ''' Ensures that only one log file lexer instance exists and
        prevents of getting multiple callbacks
    '''
    # the single shared instance, created lazily on first call
    _instance = None
    def __call__(cls, *args, **kwargs):
        ''' The real constructor and first method called when
            a new instance should be created.
            On first instantiation class variable _instance gets itself assigned,
            every subsequent instantiation try returns this object
        '''
        if cls._instance is None:
            cls._instance = super(LOGFILE_LEXER_SINGLETON, cls).__call__(*args, **kwargs)
        return cls._instance
class LOGFILE_LEXER(object):
''' A line based lexer implementation.
Whole line gets coloured if part or complete line matches regex
'''
__metaclass__ = LOGFILE_LEXER_SINGLETON
DEFAULT = 0 # the current default style
ERROR_STYLE = 60
WARNING_STYLE = 61
# define the style colors
ERROR_STYLE_FOREGROUND = (224,108,117)
WARNING_STYLE_FOREGROUND = (255,255,128)
# define the regex which return the position of the matches
ERROR_REGEX = '.*error.*'
WARNING_REGEX = '.*warning.*'
def __init__(self):
''' Register needed callbacks on first class instantiation '''
editor.callbackSync(self.styleneeded_callback, [SCINTILLANOTIFICATION.STYLENEEDED])
notepad.callback(self.bufferactivated_callback, [NOTIFICATION.BUFFERACTIVATED])
notepad.callback(self.langchanged_callback, [NOTIFICATION.LANGCHANGED])
def logfile_lexer(self, start_pos, end_pos):
''' Main lexing logic.
Gets called by styleneeded callback
'''
def style_it(match, STYLE):
''' Inform scintilla to do the styling'''
if match[1]-match[0] >= 0:
editor.startStyling(start_pos + match[0], 31)
editor.setStyling(match[1]-match[0], STYLE)
def do_regex(regex):
''' return a list of match positions
Note, is using python regular expression instead of boost::re
'''
return [m.span(0) for m in re.finditer(regex, text, flags=re.I)]
# ensure that position is really the first position for each line
start_pos = editor.positionFromLine(editor.lineFromPosition(start_pos))
# fast but potentially unsafe way to get the text of the line
text = editor.getRangePointer(start_pos, end_pos-start_pos)
# first everything will be styled with default style
style_it((start_pos, end_pos), self.DEFAULT)
# map style_it function to each match returned by do_regex
# ordering is important as it might be that a line matches
# multiple regexes - the last do_regex overwrites previous styling
map(lambda match: style_it(match, self.WARNING_STYLE), do_regex(self.WARNING_REGEX))
map(lambda match: style_it(match, self.ERROR_STYLE), do_regex(self.ERROR_REGEX))
# this needs to stay and to be the last line, to signal scintilla we are done.
editor.startStyling(end_pos,31)
def init_scintilla(self):
''' Initialize configured styles '''
editor.setMarginWidthN(0,38)
editor.setMarginWidthN(1,14)
editor.setMarginWidthN(2,0)
if editor.getLexer() != LEXER.CONTAINER:
editor.setLexer(LEXER.CONTAINER)
editor.styleSetFore(self.ERROR_STYLE, self.ERROR_STYLE_FOREGROUND)
editor.styleSetFore(self.WARNING_STYLE, self.WARNING_STYLE_FOREGROUND)
def set_lexer_doc(self,bool_value):
''' Assign the class name as an additional property
to every document which should be handled by this lexer
A value of 1 indicates it should be handled.
'''
editor.setProperty(self.__class__.__name__, 1 if bool_value is True else 0)
def is_lexer_doc(self):
''' Check if the current document is of interest
by reading the class name property.
'''
return True if editor.getPropertyInt(self.__class__.__name__, 0) == 1 else False
def styleneeded_callback(self, args):
    ''' Called by scintilla to inform the lexer
        about the need to style the document.
        Acts only on documents tagged for this lexer; rewinds the start
        position to the beginning of its line before delegating to the
        main logic (logfile_lexer).
    '''
    if not self.is_lexer_doc():
        return
    first_unstyled = editor.getEndStyled()
    line_start = editor.positionFromLine(editor.lineFromPosition(first_unstyled))
    self.logfile_lexer(line_start, args['position'])
def bufferactivated_callback(self, args):
    ''' Called by notepad when a document switch happens.
        Styles must be re-applied for documents handled by this lexer.
    '''
    if not self.is_lexer_doc():
        return
    self.init_scintilla()
def langchanged_callback(self, args):
    ''' Called by notepad when a built-in or UDL language switch happens.
        A document previously styled by this lexer is untagged, so it is no
        longer styled by this lexer until the script is run on it again.
    '''
    if not self.is_lexer_doc():
        return
    self.set_lexer_doc(False)
def main(self):
    ''' Main entry point
        To prevent issues with other lexers document language will
        be set to normal text, then document does get the class name
        property assigned, styles do get initialized and main lexing
        function does get called on whole document
    '''
    notepad.setLangType(LANGTYPE.TXT)
    self.set_lexer_doc(True)
    self.init_scintilla()
    # Style the whole document once up front; afterwards styling is
    # incremental via styleneeded_callback.
    self.logfile_lexer(0, editor.getTextLength())
# <comment_or_delete>
# just some demo text not really needed by lexer
# Opens a fresh document and fills it with sample lines containing the
# WARNING/ERROR keywords the lexer highlights.
notepad.new()
editor.appendText('''
A line with no keywords
followed by a warning line
some
fillings
until
a line with ERROR appears
again
some
text
and a line with error and WARNING
''')
# </comment_or_delete>
# Instantiate and run the lexer on the active document.
LOGFILE_LEXER().main()
| 37.911458
| 96
| 0.615469
| 854
| 7,279
| 5.105386
| 0.314988
| 0.027523
| 0.011009
| 0.009633
| 0.072936
| 0.05367
| 0.029817
| 0
| 0
| 0
| 0
| 0.009873
| 0.318176
| 7,279
| 191
| 97
| 38.109948
| 0.868628
| 0.371342
| 0
| 0.1125
| 0
| 0
| 0.044422
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15
| false
| 0
| 0.025
| 0
| 0.35
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b52da16ba296fc3a30eb27a8abe81a9c2046c19
| 913
|
py
|
Python
|
py_mybatis/mapper_func.py
|
malone081021/py_mybatis
|
bea69b2aa0d17d9a96ddf596bc63bd88e5a2045e
|
[
"Apache-2.0"
] | 15
|
2020-08-20T03:57:13.000Z
|
2022-02-01T03:08:14.000Z
|
py_mybatis/mapper_func.py
|
malone081021/py_mybatis
|
bea69b2aa0d17d9a96ddf596bc63bd88e5a2045e
|
[
"Apache-2.0"
] | 3
|
2020-08-20T05:43:22.000Z
|
2020-09-23T08:08:10.000Z
|
py_mybatis/mapper_func.py
|
malone081021/py_mybatis
|
bea69b2aa0d17d9a96ddf596bc63bd88e5a2045e
|
[
"Apache-2.0"
] | 10
|
2020-12-10T06:17:20.000Z
|
2022-02-26T07:07:48.000Z
|
from .logger import LOG
from .funs import *
# Function container: a simple name -> callable registry.
class PyFunction(object):
    """Registry mapping function names to callables."""

    def __init__(self):
        # name -> callable
        self.function_map = dict()

    def register_func(self, function_name: str, func):
        """Register *func* under *function_name*; the first registration wins."""
        if function_name in self.function_map:
            LOG.warning("function {} exist".format(function_name))
            return
        self.function_map[function_name] = func

    def get_func(self, function_name: str):
        """Return the callable registered for *function_name*, or None."""
        return self.function_map.get(function_name)

    def call_func(self, function_name: str, *args):
        """Invoke the registered callable with *args*.

        Returns None when the name is unknown or the entry is not callable.
        """
        target = self.get_func(function_name)
        if target and callable(target):
            return target(*args)
# Parameter conversion functions.
PY_PARAM_FUNCTION = PyFunction()
# Register the default parameter-conversion functions (from `funs`,
# imported with *).
for fun_name in default_fun_dict:
    PY_PARAM_FUNCTION.register_func(fun_name, default_fun_dict[fun_name])
# Result (return-value) conversion functions; starts empty.
PY_RESULT_FUNCTION = PyFunction()
| 25.361111
| 73
| 0.682366
| 122
| 913
| 4.811475
| 0.336066
| 0.183986
| 0.127768
| 0.102215
| 0.335605
| 0.218058
| 0.105622
| 0
| 0
| 0
| 0
| 0
| 0.231106
| 913
| 35
| 74
| 26.085714
| 0.836182
| 0.031763
| 0
| 0.090909
| 0
| 0
| 0.01934
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0
| 0.090909
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b533be27e81a8ec5389e0a6e1d39a3dcf1f48a7
| 3,283
|
py
|
Python
|
multiAgentEnv/maenv/mpe/_mpe_utils/forward_models/forward_model.py
|
eranbTAU/Closing-the-Reality-Gap-for-a-Multi-Agent-System-Using-GAN
|
3df5f8ba1069ce3f16f1ab743da9cbdd3bddd43c
|
[
"MIT"
] | null | null | null |
multiAgentEnv/maenv/mpe/_mpe_utils/forward_models/forward_model.py
|
eranbTAU/Closing-the-Reality-Gap-for-a-Multi-Agent-System-Using-GAN
|
3df5f8ba1069ce3f16f1ab743da9cbdd3bddd43c
|
[
"MIT"
] | null | null | null |
multiAgentEnv/maenv/mpe/_mpe_utils/forward_models/forward_model.py
|
eranbTAU/Closing-the-Reality-Gap-for-a-Multi-Agent-System-Using-GAN
|
3df5f8ba1069ce3f16f1ab743da9cbdd3bddd43c
|
[
"MIT"
] | 1
|
2022-02-22T11:06:40.000Z
|
2022-02-22T11:06:40.000Z
|
import torch
import numpy as np
import os
from ..utils import load_params, get_scaler, get_rescaler
class ForwardModel():
    """Pretrained forward-dynamics network wrapper for 'car' or 'fish' robots.

    Loads hyper-parameters from a per-robot YAML config, restores the network
    weights and builds scaling/rescaling helpers for inputs and outputs.
    """

    def __init__(self, robot_type=None):
        filename = f'.../maenv/mpe/_mpe_utils/{robot_type}_config.yaml'
        try:
            params = load_params(filename).get('forward_model')
        except FileNotFoundError:
            # Fix: the original only printed here and fell through, then
            # crashed with a NameError on the unbound `params`. Re-raise so
            # the caller sees the real problem.
            print(f"{filename} not found. Note that robot_type param should be 'car' or 'fish'")
            raise
        self.model_name = params['fd_model']
        self.robot_type = robot_type
        self.device = set_device(params['fdm_device'])
        self.dir = '.../maenv/mpe/_mpe_utils/forward_models'
        self.model = self.load_forward_model(params['fdm_input_dim'], params['fdm_output_dim'])
        self.model.to(self.device)

    def load_state_dict(self, path, net):
        """Load the checkpoint at *path* into *net* and switch it to eval mode.

        Checkpoints exist in three layouts: a raw state dict, or one nested
        under 'best_state' / 'best_state_val'; each is tried in turn.
        """
        state = torch.load(path, map_location=self.device)
        # Fix: narrowed the original bare `except:` clauses, which also
        # swallowed KeyboardInterrupt/SystemExit.
        try:
            net.load_state_dict(state)
        except Exception:
            try:
                net.load_state_dict(state['best_state'])
            except Exception:
                net.load_state_dict(state['best_state_val'])
        net.eval()
        return net

    def load_forward_model(self, input_dim, output_dim):
        '''
        load forward model for eval mode
        '''
        if self.robot_type == 'car':
            from maenv.mpe._mpe_utils.forward_models.car.NN import get_fd_model
            NN, scalers = get_fd_model(self.model_name)
            # NOTE(review): the car network ignores the passed dims and is
            # fixed at 2 inputs / 3 outputs — kept as in the original.
            net = NN(input_dim=2, output_dim=3)
            min_x = [scalers[0], scalers[0]]
            max_x = [scalers[1], scalers[1]]
            min_y = [scalers[2], scalers[4], scalers[6]]
            max_y = [scalers[3], scalers[5], scalers[7]]
        elif self.robot_type == 'fish':
            from maenv.mpe._mpe_utils.forward_models.fish.NN import get_fd_model
            NN, scalers = get_fd_model(self.model_name)
            net = NN(input_dim, output_dim)
            min_x = [scalers[0], scalers[2], scalers[4]]
            max_x = [scalers[1], scalers[3], scalers[5]]
            min_y = [scalers[6], scalers[8], scalers[10]]
            max_y = [scalers[7], scalers[9], scalers[11]]
        else:
            raise NameError(self.robot_type)
        model_path = os.path.join(self.dir, self.robot_type, 'models', self.model_name)
        model = self.load_state_dict(model_path, net)
        # Helpers mapping between raw and normalised coordinates (semantics
        # defined by utils.get_scaler/get_rescaler).
        self.scaledown_x = get_scaler(np.array(min_x), np.array(max_x))
        self.scaledown_y = get_scaler(np.array(min_y), np.array(max_y))
        self.rescale_x = get_rescaler(np.array(min_x), np.array(max_x))
        self.rescale_y = get_rescaler(np.array(min_y), np.array(max_y))
        return model

    def predict_single(self, x):
        """Predict one sample: numpy input -> rescaled numpy output.

        A batch dimension is added before the forward pass and squeezed away
        afterwards.
        """
        x = torch.from_numpy(x).unsqueeze(0).to(self.device)
        with torch.no_grad():
            y = self.model(x)
        y = y.squeeze().detach().cpu().numpy()
        return self.rescale_y(y)

    def predict_batch(self, x):
        """Predict a batch: numpy array of samples -> rescaled numpy outputs."""
        x = torch.from_numpy(x).to(self.device)
        with torch.no_grad():
            y = self.model(x)
        y = y.detach().cpu().numpy()
        return self.rescale_y(y)
def set_device(device):
    """Return a torch.device for *device*, falling back to CPU when CUDA is unavailable."""
    if device == 'cpu' or not torch.cuda.is_available():
        return torch.device('cpu')
    return torch.device(device)
| 35.301075
| 115
| 0.603412
| 463
| 3,283
| 4.043197
| 0.231102
| 0.043269
| 0.041667
| 0.034188
| 0.383547
| 0.321581
| 0.291667
| 0.201923
| 0.178419
| 0.150641
| 0
| 0.011227
| 0.267438
| 3,283
| 92
| 116
| 35.684783
| 0.767152
| 0.009747
| 0
| 0.214286
| 0
| 0
| 0.084988
| 0.027295
| 0
| 0
| 0
| 0
| 0
| 1
| 0.085714
| false
| 0
| 0.085714
| 0.014286
| 0.257143
| 0.014286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b5505ca703b310b156073810269854022e84cd6
| 948
|
py
|
Python
|
src/personal/views.py
|
vybhavpai/Blog
|
8e0adf52bfba5502f787dd339866f39b5f4856f4
|
[
"bzip2-1.0.6"
] | null | null | null |
src/personal/views.py
|
vybhavpai/Blog
|
8e0adf52bfba5502f787dd339866f39b5f4856f4
|
[
"bzip2-1.0.6"
] | 17
|
2020-06-06T00:04:23.000Z
|
2022-03-12T00:25:43.000Z
|
src/personal/views.py
|
vybhavpai/Blog
|
8e0adf52bfba5502f787dd339866f39b5f4856f4
|
[
"bzip2-1.0.6"
] | null | null | null |
from django.shortcuts import render
from blog.models import BlogPost
from operator import attrgetter
from blog.views import get_blog_queryset
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
# Create your views here.
BLOG_POSTS_PER_PAGE = 1
def home_screen_view(request):
    """Render the blog home page with searched, paginated posts.

    Reads optional GET params: 'q' (search query, echoed back in context)
    and 'page' (1-based page number).
    """
    context = {}
    query = ""
    if request.GET:
        query = request.GET.get('q','')
        context['query'] = str(query)
    # Newest-updated posts first.
    blog_posts = sorted(get_blog_queryset(query),key=attrgetter('date_updated'),reverse=True)
    # Pagination
    page = request.GET.get('page', 1)
    blog_posts_paginator = Paginator(blog_posts, BLOG_POSTS_PER_PAGE)
    try:
        blog_posts = blog_posts_paginator.page(page)
    except PageNotAnInteger:
        # Fix: fall back to the FIRST page. The original passed
        # BLOG_POSTS_PER_PAGE here, which only behaved correctly because the
        # constant happens to equal 1.
        blog_posts = blog_posts_paginator.page(1)
    except EmptyPage:
        blog_posts = blog_posts_paginator.page(blog_posts_paginator.num_pages)
    context['blog_posts'] = blog_posts
    return render(request, "personal/home.html", context)
| 27.085714
| 90
| 0.781646
| 132
| 948
| 5.356061
| 0.363636
| 0.190948
| 0.127298
| 0.127298
| 0.157001
| 0.157001
| 0.113154
| 0.113154
| 0
| 0
| 0
| 0.002398
| 0.120253
| 948
| 34
| 91
| 27.882353
| 0.845324
| 0.035865
| 0
| 0
| 0
| 0
| 0.054885
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0.217391
| 0
| 0.304348
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b551741cab1afbab203fa568039b9c06d277d87
| 5,771
|
py
|
Python
|
tools/makeplugin.py
|
rhcad/x3py
|
a631b9130673683f9c0395209ff49b94eb0ceac6
|
[
"Apache-2.0"
] | 214
|
2015-01-25T15:54:57.000Z
|
2022-03-13T13:20:34.000Z
|
tools/makeplugin.py
|
HAERBINWINE/x3py
|
a631b9130673683f9c0395209ff49b94eb0ceac6
|
[
"Apache-2.0"
] | 18
|
2015-03-11T15:42:56.000Z
|
2021-12-23T10:11:03.000Z
|
tools/makeplugin.py
|
HAERBINWINE/x3py
|
a631b9130673683f9c0395209ff49b94eb0ceac6
|
[
"Apache-2.0"
] | 126
|
2015-01-19T03:28:48.000Z
|
2022-03-16T06:52:57.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Function: Create a plugin project based on example project.
This script has been tested with ActivePython 2.7/3.2.
Usage: python makeplugin.py [prjname [pkgname [srcprj [srcpkg [useswig]]]]]
or double click the file 'makeplugin.py'.
prjname: name of the new project.
pkgname: package name of the new project, the default value is 'example'.
srcprj: template (exists) project name, the default value is 'pltempl'.
srcpkg: package name of the template project, the default value is 'example'.
useswig: need swig files or not (y/n), the default value is 'n'.
Creator: Zhang Yungui <rhcad@hotmail.com>
Date: 2011.11.14
ChangeList:
1. Implemented the function: multi_replace, copyfiles, makeproj
2. Auto copy interface files and skip swig files. [2011.12.14]
3. Auto skip empty folders. [2012.02.21]
"""
import os, sys, re, uuid
def multi_replace(text, adict):
    """Replace every occurrence of each key of *adict* in *text* with its value.

    All keys are combined into a single alternation regex (escaped, so keys
    are matched literally), giving one left-to-right pass over the text.
    """
    pattern = re.compile('|'.join(map(re.escape, adict)))
    return pattern.sub(lambda match: adict[match.group(0)], text)
def copyfiles(srcdir, dstdir, pairs, srcprj, callback, noswig):
    # Recursively copy srcdir into dstdir, renaming file names and rewriting
    # file contents via the `pairs` replacement map (see multi_replace).
    # `callback(filename, pairs)` decides whether a file is copied;
    # `noswig` skips swig-related directories and files.
    if ".svn" in srcdir or not os.path.exists(srcdir): return
    if noswig and "swig" in srcdir: return
    # Presence of the template's .i file means swig interface/test files
    # should be skipped in this directory.
    has_i = noswig and os.path.exists(os.path.join(srcdir, srcprj + ".i"))
    for fn in os.listdir(srcdir):
        srcfile = os.path.join(srcdir, fn)
        dstfile = os.path.join(dstdir, multi_replace(fn, pairs))
        if os.path.isdir(srcfile):
            copyfiles(srcfile, dstfile, pairs, srcprj, callback, noswig)
            continue
        if has_i and (fn==srcprj+".i" or fn.startswith("test"+srcprj)): continue
        # Copy only files that do not exist yet and that the callback accepts;
        # destination folders are created lazily so empty folders are skipped.
        if not os.path.exists(dstfile) and callback(fn, pairs):
            if not os.path.exists(dstdir): os.makedirs(dstdir)
            # NOTE(review): file handles here and below are never closed
            # explicitly — relies on CPython refcounting.
            open(dstfile, "wb").write(open(srcfile, "rb").read())
            isutf8 = False
            # Read the copy back as text, falling back to utf-8; binary files
            # that decode under neither encoding are left as raw copies.
            try:
                text = open(dstfile).read()
            except UnicodeDecodeError:
                try:
                    text = open(dstfile,'r',-1,'utf-8').read()
                    isutf8 = True
                except UnicodeDecodeError:
                    print("* Fail to read '%s' as utf-8 encoding." % (dstfile,))
                    continue
            # Apply the textual replacements, writing back with the encoding
            # that was detected on read.
            newtext = multi_replace(text, pairs)
            if newtext != text:
                try:
                    if (isutf8):
                        open(dstfile, 'w',-1,'utf-8').write(newtext)
                        print('[replaced] %s [utf-8]' % (dstfile,))
                    else:
                        open(dstfile, 'w').write(newtext)
                        print('[replaced] %s' % (dstfile,))
                except UnicodeDecodeError:
                    open(dstfile, 'w',-1,'utf-8').write(newtext)
                    print('[replaced] %s [utf-8]' % (dstfile,))
            else:
                print('[created] %s' % (dstfile,))
def makeproj(prjname, pkgname, srcprj, srcpkg, noswig):
    # Create project `pkgname/prjname` from template `srcpkg/srcprj`:
    # copies source files, interface files (with fresh GUIDs substituted)
    # and matching project files under ../projects.
    rootpath = os.path.abspath('..')
    basepath = os.path.join(rootpath, 'source', srcpkg, srcprj)
    pkgpath = os.path.join(rootpath, 'source', pkgname)
    dstdir = os.path.join(pkgpath, prjname)
    if prjname == '':
        print("Need input the project name.")
        return
    if not os.path.exists(basepath):
        print("\nPlease input a valid exists template project name."
              "\n\'%s\' does not exist." % (basepath,))
        return
    if not os.path.exists(pkgpath):
        os.makedirs(pkgpath)
        # A freshly created package also gets a copy of the template
        # package's Makefile.
        srcmk = os.path.join(rootpath, 'source', srcpkg, 'Makefile')
        if os.path.exists(srcmk):
            dstmk = os.path.join(pkgpath, 'Makefile')
            open(dstmk, "w").write(open(srcmk).read())
            print('%s [created]' % (dstmk,))
    pairs = {srcprj:prjname, srcpkg:pkgname}
    def matchfile(filename, pairs):
        # Skip swig-generated wrapper files; accept everything else.
        if filename.find("_wrap.cxx") > 0 or filename.find("_wrap.h") > 0:
            return False
        return True
    copyfiles(basepath, dstdir, pairs, srcprj, matchfile, noswig)
    # Interface headers are copied with fresh GUIDs replacing the template's
    # well-known interface GUIDs.
    basepath = os.path.join(rootpath, 'interface', srcpkg, srcprj)
    dstdir = os.path.join(rootpath, 'interface', pkgname, prjname)
    # NOTE(review): `intpairs = pairs` aliases (does not copy) the dict, so
    # the GUID entries added below are also active in the projects pass at
    # the end — confirm that is intended.
    intpairs = pairs
    intpairs["78d30c77-e0f0-48a3-a489-dd4327759c27"] = str(uuid.uuid1())
    intpairs["94071767-ba6b-4769-9eb4-2ebf469289f3"] = str(uuid.uuid1())
    intpairs["feefc399-29f2-4354-8eeb-048d4cf56567"] = str(uuid.uuid1())
    copyfiles(basepath, dstdir, intpairs, srcprj, matchfile, noswig)
    def matchproj(filename, pairs):
        # Accept project files named after the template keys ("key_*"/"key.*"),
        # skipping per-user IDE settings.
        if ".user" in filename: return False
        for key in pairs.keys():
            if filename.startswith(key + '_') or \
               filename.startswith(key + '.'): return True
        return False
    projects = os.path.join(rootpath, 'projects')
    # Project files are rewritten in place within the same folder.
    copyfiles(projects, projects, pairs, srcprj, matchproj, noswig)
if __name__=="__main__":
def inputparam(index, prompt, default=''):
if len(sys.argv) > index: ret = sys.argv[index]
else: ret = raw_input(prompt)
if ret == '': ret = default
return ret
prjname = inputparam(1, 'New project name: ')
pkgname = inputparam(2, 'Package name of the new project (default: example): ', 'example')
srcprj = inputparam(3, 'Template (exists) project name (default: pltempl): ', 'pltempl')
srcpkg = inputparam(4, 'Package name of the template project (default: example): ', 'example')
useswig = inputparam(5, 'Need swig (y/n) ? (default: n): ', 'n')
makeproj(prjname, pkgname, srcprj, srcpkg, 'n' in useswig)
if len(sys.argv) < 3: raw_input("Press <ENTER> to end.")
| 41.517986
| 99
| 0.582741
| 691
| 5,771
| 4.839363
| 0.287988
| 0.035885
| 0.032895
| 0.032297
| 0.186902
| 0.114833
| 0.034689
| 0.034689
| 0.034689
| 0.034689
| 0
| 0.029504
| 0.283486
| 5,771
| 138
| 100
| 41.818841
| 0.779202
| 0.165309
| 0
| 0.185567
| 0
| 0
| 0.149126
| 0.022463
| 0
| 0
| 0
| 0
| 0
| 1
| 0.072165
| false
| 0
| 0.010309
| 0.010309
| 0.164948
| 0.082474
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b5be5c5ed6504a2c9ccae3984febdda2e35498d
| 5,862
|
py
|
Python
|
server/app.py
|
LeartKrasniqi/Sign-Language-Interpreter
|
46ffef29eaf3746d48654bc4430d6e921120d33e
|
[
"MIT"
] | 3
|
2020-02-25T14:43:33.000Z
|
2020-10-10T08:42:27.000Z
|
server/app.py
|
LeartKrasniqi/Sign-Language-Interpreter
|
46ffef29eaf3746d48654bc4430d6e921120d33e
|
[
"MIT"
] | null | null | null |
server/app.py
|
LeartKrasniqi/Sign-Language-Interpreter
|
46ffef29eaf3746d48654bc4430d6e921120d33e
|
[
"MIT"
] | 1
|
2020-02-24T19:46:11.000Z
|
2020-02-24T19:46:11.000Z
|
import os
from flask import Flask, flash, request, redirect, url_for, jsonify
from flask_cors import CORS
from werkzeug.utils import secure_filename
import subprocess
# set up text to speech map
from nltk.stem import PorterStemmer
from nltk.tokenize import word_tokenize
# speech recognition
import speech_recognition as sr
recognizer = sr.Recognizer()
# Create dictionaries mapping words / Porter stems to sign-image file paths.
word_dict = dict()
stem_dict = dict()
# Create stemmer
stemmer = PorterStemmer()
# Read in the dictionary file; each line is "<word> <relative image path>".
dict_dir = "../img/"
dict_filename = dict_dir + "dict.txt"
dict_file = open(dict_filename, "r")
dict_lines = dict_file.read().splitlines()
# Make map of word and word stem to file; the first occurrence of a word or
# stem wins, later duplicates are ignored.
for line in dict_lines:
    split = line.split()
    word = split[0]
    filepath = dict_dir + split[1]
    if word not in word_dict.keys():
        word_dict[word] = filepath
    stem = stemmer.stem(word)
    if stem not in stem_dict.keys():
        stem_dict[stem] = filepath
# List of words that do not need a sign
non_signs = ["is", "are", "be"]
# The alphabet; characters outside this set get no fingerspelling image.
alpha = "abcdefghijklmnopqrstuvwxyz"
def text2imgpath(recognized_words):
    """Translate a recognized sentence into a list of sign-image paths.

    Each token is matched against the word dictionary, then its Porter stem
    against the stem dictionary; unmatched tokens fall back to per-letter
    fingerspelling images. Words in `non_signs` are skipped entirely.
    """
    img_links = []
    for token in word_tokenize(recognized_words):
        token = token.lower()
        # Words that need no sign are dropped.
        if token in non_signs:
            continue
        wordstem = stemmer.stem(token)
        if token in word_dict.keys():
            # Whole-word sign image.
            img_links.append(word_dict[token])
        elif wordstem in stem_dict.keys():
            img_links.append(stem_dict[wordstem])
        else:
            # Fall back to one letter image per character, skipping anything
            # not in our alphabet.
            img_links.extend(
                "../img/letters/{}.png".format(ch)
                for ch in token if ch in alpha
            )
    return img_links
# set up server
UPLOAD_FOLDER_VIDEO = './videos/'
UPLOAD_FOLDER_VOICE = './voice/'
UPLOAD_FOLDER_IMAGE = './images/'
ALLOWED_EXTENSIONS = {'mpg', 'mp4'}
# Fix: the original created Flask(__name__) twice — the first instance was
# built and immediately discarded. One application object is enough.
app = Flask(__name__)
app.config['UPLOAD_FOLDER_VIDEO'] = UPLOAD_FOLDER_VIDEO
app.config['UPLOAD_FOLDER_VOICE'] = UPLOAD_FOLDER_VOICE
app.config['UPLOAD_FOLDER_IMAGE'] = UPLOAD_FOLDER_IMAGE
CORS(app)
def allowed_file(filename):
    """Return True when *filename* has an extension listed in ALLOWED_EXTENSIONS."""
    stem, dot, extension = filename.rpartition('.')
    return bool(dot) and extension.lower() in ALLOWED_EXTENSIONS
@app.route('/')
def hello():
    ''' Trivial health-check endpoint. '''
    return "Hello World!"
@app.route('/audio',methods=['POST', 'GET'])
def audio():
    """Accept an uploaded speech clip, transcribe it and return sign-image paths.

    POST: saves the webm audio, converts it to wav via ffmpeg, runs Google
    speech recognition and returns a JSON list of image paths; on failure
    (or GET) returns the plain string "success".
    """
    if request.method == 'POST':
        file = request.files['audio']
        filename = "speech"
        filename = secure_filename(filename)
        audiopath = os.path.join(app.config['UPLOAD_FOLDER_VOICE'], filename + ".webm")
        audionewpath = os.path.join(app.config['UPLOAD_FOLDER_VOICE'], filename + ".wav")
        file.save(audiopath)
        # Convert webm -> wav so speech_recognition can read it.
        command = "ffmpeg -i " + audiopath + " -ab 160k -ac 2 -y -ar 44100 -vn " + audionewpath
        os.system(command)
        speech = sr.AudioFile(audionewpath)
        with speech as audio_file:
            try:
                recognizer.adjust_for_ambient_noise(audio_file)
                audio = recognizer.record(audio_file, offset = 0)
                # list of recognized words in list
                recog_words = recognizer.recognize_google(audio).lower() #.split(",!? ")
                # get image path for the word
                imgpath = text2imgpath(recog_words)
                return(jsonify(imgpath))
            except Exception as e:
                # Fix: Exception.message does not exist in Python 3 — the
                # original raised AttributeError inside this handler.
                print("Exception found!: {}: {}".format(type(e), e))
    return "success"
@app.route('/video', methods=['POST','GET'])
def audiovideo():
    """Store an uploaded audio/video recording in the video upload folder."""
    if request.method == 'POST':
        upload = request.files['audiovideo']
        target = os.path.join(app.config['UPLOAD_FOLDER_VIDEO'],
                              secure_filename("asl_video.webm"))
        upload.save(target)
    return jsonify(status="success", text="blah")
@app.route('/videofile', methods=['POST','GET'])
def videofile():
    """Store an uploaded mp4 video file in the video upload folder."""
    if request.method == 'POST':
        upload = request.files['video']
        target = os.path.join(app.config['UPLOAD_FOLDER_VIDEO'],
                              secure_filename("asl_video.mp4"))
        upload.save(target)
    return jsonify(status="success", text="blah")
@app.route('/image', methods=['POST','GET'])
def image():
    """Store an uploaded image in the image upload folder.

    The returned letter is a hard-coded placeholder.
    """
    if request.method == 'POST':
        upload = request.files['image']
        target = os.path.join(app.config['UPLOAD_FOLDER_IMAGE'],
                              secure_filename("asl_image.png"))
        upload.save(target)
    return jsonify(letter='H')
@app.route('/api/upload', methods=['POST','GET'])
def upload_file():
    ''' Generic file-upload endpoint; GET (or a failed POST) falls through
        to a minimal HTML upload form.
    '''
    if request.method == 'POST':
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file']
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            # NOTE(review): app.config['UPLOAD_FOLDER'] is never set in this
            # module (only *_VIDEO/_VOICE/_IMAGE are) — this line raises
            # KeyError; confirm which folder was intended.
            file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
            # NOTE(review): no 'uploaded_file' view is defined in this module;
            # url_for fails unless it exists elsewhere.
            return redirect(url_for('uploaded_file', filename=filename))
    return '''
<!doctype html>
<title>Upload new File</title>
<h1>Upload new File</h1>
<form method=post enctype=multipart/form-data>
<input type=file name=file>
<input type=submit value=Upload>
</form>
'''
if __name__ == '__main__':
    # Development server only — debug=True must not be used in production.
    app.run(host="localhost", port=42248, debug=True)
| 31.686486
| 95
| 0.628796
| 736
| 5,862
| 4.865489
| 0.275815
| 0.050265
| 0.037699
| 0.052779
| 0.211393
| 0.178163
| 0.178163
| 0.126222
| 0.126222
| 0.101648
| 0
| 0.00589
| 0.247015
| 5,862
| 184
| 96
| 31.858696
| 0.805392
| 0.105084
| 0
| 0.141791
| 0
| 0
| 0.172315
| 0.01436
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059701
| false
| 0
| 0.059701
| 0.014925
| 0.201493
| 0.007463
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b5d3c5659fc7c63b174be7b4744d1de06e0f242
| 1,128
|
py
|
Python
|
main/PluginDemos/MomentOfInertia/MomentOfInertia3D/Simulation/MomentOfInertia3DSteppables.py
|
JulianoGianlupi/nh-cc3d-4x-base-tool
|
c0f4aceebd4c5bf3ec39e831ef851e419b161259
|
[
"CC0-1.0"
] | null | null | null |
main/PluginDemos/MomentOfInertia/MomentOfInertia3D/Simulation/MomentOfInertia3DSteppables.py
|
JulianoGianlupi/nh-cc3d-4x-base-tool
|
c0f4aceebd4c5bf3ec39e831ef851e419b161259
|
[
"CC0-1.0"
] | null | null | null |
main/PluginDemos/MomentOfInertia/MomentOfInertia3D/Simulation/MomentOfInertia3DSteppables.py
|
JulianoGianlupi/nh-cc3d-4x-base-tool
|
c0f4aceebd4c5bf3ec39e831ef851e419b161259
|
[
"CC0-1.0"
] | 1
|
2021-02-26T21:50:29.000Z
|
2021-02-26T21:50:29.000Z
|
from cc3d.core.PySteppables import *
class MomentOfInertiaPrinter3D(SteppableBasePy):
    # Demo steppable: creates one ellipsoidal cell at start and periodically
    # prints the semiaxes reported by the MomentOfInertia plugin.
    def __init__(self, frequency=10):
        SteppableBasePy.__init__(self, frequency)
        # Ellipsoid semiaxis lengths in lattice units.
        # NOTE(review): the names do not match the magnitudes (semi_minor is
        # the largest at 15); the axis/name pairing below follows the
        # original code — confirm intent.
        self.semi_minor_axis = 15
        self.semi_median_axis = 8
        self.semi_major_axis = 5
    def start(self):
        # Build the test cell once at simulation start.
        self.generate_ellipsoid(self.semi_minor_axis, self.semi_median_axis, self.semi_major_axis)
    def generate_ellipsoid(self, semi_minor_axis, semi_median_axis, semi_major_axis):
        # Assign every lattice pixel inside the ellipsoid (centred in the
        # domain; x paired with the minor axis, y with major, z with median)
        # to a single new cell of type 1.
        cell = self.new_cell(cell_type=1)
        for x, y, z in self.every_pixel():
            if (x - self.dim.x / 2.0) ** 2 / semi_minor_axis ** 2 + (
                    y - self.dim.y / 2.0) ** 2 / semi_major_axis ** 2 + (
                    z - self.dim.z / 2.0) ** 2 / semi_median_axis ** 2 < 1:
                self.cell_field[x, y, z] = cell
    def step(self, mcs):
        # Print the plugin-computed semiaxes for every cell on each call.
        for cell in self.cellList:
            # preferred way of accessing information about semiminor axes
            axes = self.momentOfInertiaPlugin.getSemiaxes(cell)
            print("minorAxis=", axes[0], " majorAxis=", axes[2], " medianAxis=", axes[1])
| 40.285714
| 98
| 0.618794
| 152
| 1,128
| 4.342105
| 0.375
| 0.084848
| 0.078788
| 0.077273
| 0.10303
| 0.10303
| 0
| 0
| 0
| 0
| 0
| 0.03023
| 0.266844
| 1,128
| 27
| 99
| 41.777778
| 0.767836
| 0.052305
| 0
| 0
| 0
| 0
| 0.030928
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.05
| 0
| 0.3
| 0.05
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b5d42d58638f8e19f6b1354d952d443c3e13a52
| 4,003
|
py
|
Python
|
skirt-sniffer.py
|
macenpav/skirt-sniffer
|
3ba45d5f7f22b83417525835dc594e698c4ebb1a
|
[
"MIT"
] | null | null | null |
skirt-sniffer.py
|
macenpav/skirt-sniffer
|
3ba45d5f7f22b83417525835dc594e698c4ebb1a
|
[
"MIT"
] | null | null | null |
skirt-sniffer.py
|
macenpav/skirt-sniffer
|
3ba45d5f7f22b83417525835dc594e698c4ebb1a
|
[
"MIT"
] | null | null | null |
import data_module
import email_module
import db_module
from jinja2 import Environment, FileSystemLoader
from os import path
import time
from datetime import datetime
DIR_PATH = path.dirname(path.realpath(__file__))
# Email subject templates; {0} is filled with a product name or a date.
NEW_SKIRT_SUBJECT = r"Novinka! - {0}"
NEW_PRINT_SUBJECT = r"NOVÉ TISKY! - {0}"
DAY_INIT_SUBJECT = r"Keporkak se probouzí! - {0}"
PAGE_LIMIT = 20 # limit to read pages so we don't deadlock while mining data
def send_email(subject_base, data, **kwargs):
    """Render the email template for *data* and send a product digest.

    The subject is built from the first product's name (with a suffix when
    there is more than one item). Note: an email is sent only when the
    'is_print' keyword is present — True targets print recipients, False
    targets regular recipients.
    """
    subject = subject_base.format(data[0]['name'])
    if len(data) > 1:
        subject += ' a další ...'
    env = Environment(loader=FileSystemLoader(path.join(DIR_PATH, 'templates')))
    template = env.get_template('email.html')
    email = email_module.Email()
    email.set_content(template.render(title=subject, items=data))
    email.set_subject(subject)
    if 'is_print' in kwargs:
        if kwargs['is_print']:
            recipients = email_module.CONFIG_PRINT_R
        else:
            recipients = email_module.CONFIG_REGULAR_R
        email.send(recipients)
def send_wakeup_email():
    """Send the daily 'waking up' notification to the admin recipients."""
    subject = DAY_INIT_SUBJECT.format(datetime.strftime(datetime.today(), '%y-%m-%d'))
    env = Environment(loader=FileSystemLoader(path.join(DIR_PATH, 'templates')))
    template = env.get_template('wakeup.html')
    email = email_module.Email()
    email.set_content(template.render(title=subject))
    email.set_subject(subject)
    email.send(email_module.CONFIG_ADMIN_R)  # admin recipients only
def run_base():
    # Mine the regular product listing page by page, insert newly seen
    # products into the DB, and email a digest when anything new appeared.
    page_no = 0
    conn = db_module.open_connection()
    db_module.create_database(conn, db_module.default_product_table)
    new_data = []
    while True:
        url = data_module.get_url(page_no)
        products = data_module.mine(url)
        # mine until we can, we don't know how many pages are in the e-shop
        if products and (page_no < PAGE_LIMIT):
            new_products = db_module.insert_products(conn, db_module.default_product_table, products)
            if new_products:
                for prod in new_products:
                    new_data.append(prod)
            else:
                # A page with no unseen products ends the crawl early.
                break
            page_no += 1
        else:
            break
    db_module.close_connection(conn)
    if new_data:
        print("run_base({0}): New data found - sending email.".format(str(time.time())))
        send_email(NEW_SKIRT_SUBJECT, new_data, is_print=False)
    else:
        print("run_base({0}): No new data.".format(str(time.time())))
def run_print():
    """Mine the prints page once and email a digest of newly seen prints."""
    conn = db_module.open_connection()
    db_module.create_database(conn, db_module.default_print_table)
    products = data_module.mine(data_module.get_url_print())
    new_data = None
    if products:
        new_data = db_module.insert_products(conn, db_module.default_print_table, products)
    db_module.close_connection(conn)
    if not new_data:
        print("run_print({0}): No new data.".format(str(time.time())))
        return
    print("run_print({0}): New data found - sending email.".format(str(time.time())))
    send_email(NEW_PRINT_SUBJECT, new_data, is_print=True)
def day_init_check():
    """On the first run of the day, record today in the DB and email the admin."""
    conn = db_module.open_connection()
    db_module.create_today_database(conn)
    first_run_today = db_module.is_today_init(conn)
    if first_run_today:
        db_module.insert_today(conn)
        print("day_init_check({0}): Today's first mining - sending email.".format(str(time.time())))
        send_wakeup_email()
    db_module.close_connection(conn)
if __name__ == "__main__":
print("main({0}): Checking if server is available.".format(str(time.time())))
if data_module.is_server_available() is False:
print("main({0}): Unable to ping server.".format(str(time.time())))
else:
day_init_check()
print("main({0}): Starting to mine data.".format(str(time.time())))
run_base()
run_print()
print("main({0}): Finished mining.".format(str(time.time())))
| 32.024
| 101
| 0.671496
| 557
| 4,003
| 4.56553
| 0.228007
| 0.056626
| 0.046009
| 0.060165
| 0.470704
| 0.358632
| 0.329925
| 0.316948
| 0.233582
| 0.209988
| 0
| 0.005999
| 0.208843
| 4,003
| 124
| 102
| 32.282258
| 0.796969
| 0.05321
| 0
| 0.282609
| 0
| 0
| 0.128734
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054348
| false
| 0
| 0.076087
| 0
| 0.130435
| 0.195652
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b5e52eda69e6b966095678067006f5bc461763d
| 3,661
|
py
|
Python
|
gos/manager.py
|
sergey-aganezov-jr/gos
|
fb4d210284f3037c5321250cb95f3901754feb6b
|
[
"MIT"
] | null | null | null |
gos/manager.py
|
sergey-aganezov-jr/gos
|
fb4d210284f3037c5321250cb95f3901754feb6b
|
[
"MIT"
] | null | null | null |
gos/manager.py
|
sergey-aganezov-jr/gos
|
fb4d210284f3037c5321250cb95f3901754feb6b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from gos.configuration import Configuration
from gos.exceptions import GOSTaskException, GOSExecutableContainerException
from gos.executable_containers import ExecutableContainer
from gos.tasks import TaskLoader
class Manager(object):
def __init__(self, config):
self.configuration = config
self.tasks_classes = {}
self.tasks_instances = {}
self.executable_containers_classes = {}
self.executable_containers_instances = {}
def initiate_tasks(self):
""" Loads all tasks using `TaskLoader` from respective configuration option """
self.tasks_classes = TaskLoader().load_tasks(
paths=self.configuration[Configuration.ALGORITHM][Configuration.TASKS][Configuration.PATHS])
def instantiate_tasks(self):
""" All loaded tasks are initialized. Depending on configuration fails in such instantiations may be silent """
self.tasks_instances = {}
for task_name, task_class in self.tasks_classes.items():
try:
self.tasks_instances[task_name] = task_class()
except Exception as ex:
if not self.configuration[Configuration.ALGORITHM][Configuration.IOSF]:
raise GOSTaskException("An exception happened during the task instantiation."
"{exception}".format(exception=ex))
def initiate_executable_containers(self):
for entry in self.configuration[Configuration.ALGORITHM]["executable_containers"]:
if "reference" in entry:
reference = entry["reference"]
for ec_config in self.configuration[Configuration.ALGORITHM][reference]:
ec_config["group_reference_name"] = reference
result = ExecutableContainer.setup_from_config(manager=self, config=ec_config)
self.executable_containers_instances[result.name] = result
elif "paths" in entry:
paths = entry["paths"]
for path in paths:
try:
for ec_instance in ExecutableContainer.setup_from_file(file_path=path):
self.executable_containers_instances[ec_instance.name] = ec_instance
except GOSExecutableContainerException:
continue
if "pipeline" not in self.configuration[Configuration.ALGORITHM]["executable_containers"]:
pipeline_config = self.configuration[Configuration.ALGORITHM]["pipeline"]
if "name" not in pipeline_config:
pipeline_config["name"] = "pipeline"
self.executable_containers_instances["pipeline"] = ExecutableContainer.setup_from_config(manager=self,
config=pipeline_config)
def instantiate_executable_containers(self):
for executable_container in self.executable_containers_instances.values():
for entry_name in executable_container.entries_names:
try:
entry = self.tasks_instances[entry_name]
except KeyError:
entry = self.executable_containers_instances[entry_name]
executable_container.entries.append(entry)
def run(self):
self.executable_containers_instances["pipeline"].run(manager=self)
def get_task_instance(self, task_name):
return self.tasks_instances[task_name]
def get_executable_container_instance(self, ec_name):
    """Return the executable container registered under ``ec_name``.

    Raises KeyError when no container with that name exists.
    """
    containers = self.executable_containers_instances
    return containers[ec_name]
| 50.847222
| 124
| 0.645179
| 353
| 3,661
| 6.467422
| 0.243626
| 0.122646
| 0.094612
| 0.115637
| 0.250986
| 0.098117
| 0.098117
| 0
| 0
| 0
| 0
| 0.000379
| 0.279978
| 3,661
| 71
| 125
| 51.56338
| 0.865706
| 0.05463
| 0
| 0.084746
| 0
| 0
| 0.058295
| 0.012181
| 0
| 0
| 0
| 0
| 0
| 1
| 0.135593
| false
| 0
| 0.067797
| 0.033898
| 0.254237
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b5f11338fa982d437506f6ebd36acb9e68351e8
| 2,608
|
py
|
Python
|
couchcrdt/crdt.py
|
drsm79/couch-crdt
|
1717a8b03a488793984d7209f6da78c395b3477f
|
[
"Apache-2.0"
] | null | null | null |
couchcrdt/crdt.py
|
drsm79/couch-crdt
|
1717a8b03a488793984d7209f6da78c395b3477f
|
[
"Apache-2.0"
] | null | null | null |
couchcrdt/crdt.py
|
drsm79/couch-crdt
|
1717a8b03a488793984d7209f6da78c395b3477f
|
[
"Apache-2.0"
] | null | null | null |
import requests
import json
class CRDT(object):
    """
    A base class CRDT. This only takes care of getting the value and putting
    the local state of the CRDT from/to the database. What those things mean is
    up to specific implementations.
    """

    def __init__(self, name, url, db, auth=(), params=None, always_write=False):
        """
        name: an identifier for the crdt, translates to a key in a CouchDB view
        url: the full url of the crdt, e.g. a named view
        db: the url of the database
        auth: a tuple of (username, password)
        params: a dict of additional query string parameters to send
            to the view (optional)
        always_write: write out the state of the CRDT on every local update
        """
        self.name = name
        self.url = url
        self.database = db
        self.session = requests.Session()
        self.session.auth = auth
        self.session.headers = {
            'User-Agent': 'couchcrdt/0.0.1',
            'Content-Type': 'application/json'
        }
        p = {'reduce': True, 'group': True, 'key': self.name}
        # FIX: `params` used to be a mutable default argument ({}), and the
        # merged dict was iterated with the Python-2-only dict.iteritems(),
        # which raises AttributeError on Python 3.
        p.update(params or {})
        # CouchDB view query parameters must each be JSON-encoded.
        self.params = {k: json.dumps(v) for k, v in p.items()}
        self.value = None
        self.state = None
        self.always_write = always_write
        self.default_state = None

    def get(self):
        """
        Get the latest value from the database for the CRDT and eliminate local
        state.
        """
        r = self.session.get(self.url, params=self.params)
        r.raise_for_status()
        self.value = self._parse(r)
        self._update(self.default_state)

    def put(self):
        """
        Write the CRDT's local state (not its value) to the database.
        """
        r = self.session.post(
            self.database,
            json.dumps({
                'value': self._serialise(),
                'name': self.name,
                # fully-qualified class name lets readers reconstruct the type
                'type': '.'.join([
                    self.__class__.__module__,
                    self.__class__.__name__
                ])
            })
        )
        r.raise_for_status()

    def _update(self, state):
        """
        Update the local state, persisting immediately when always_write is set.
        """
        self.state = state
        if self.always_write:
            self.put()

    def _get_state(self):
        """
        Return the internal, local only state.
        """
        return self.state

    def _parse(self, data):
        """Extract the (reduced, grouped) view value from a CouchDB response."""
        return data.json()['rows'][0]['value']

    def _serialise(self):
        """
        Return the externalised state of the CRDT, defaults to the local state.
        """
        return self._get_state()
| 28.347826
| 79
| 0.546012
| 321
| 2,608
| 4.308411
| 0.333333
| 0.03543
| 0.02603
| 0.030369
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002363
| 0.350844
| 2,608
| 91
| 80
| 28.659341
| 0.81453
| 0.292945
| 0
| 0.041667
| 0
| 0
| 0.055351
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.145833
| false
| 0
| 0.041667
| 0.020833
| 0.270833
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b6200504eb1e628d498d1956f71cf3b490895b3
| 1,708
|
py
|
Python
|
legacy/dx/simulator/simulator_diagnoser/test/diagnosis/apt_diagnoser_test.py
|
GaloisInc/adapt
|
2ccff778d3e77505899266572f8f7caacb5b630f
|
[
"BSD-3-Clause"
] | 2
|
2020-04-09T13:04:25.000Z
|
2021-09-24T14:17:26.000Z
|
legacy/dx/simulator/simulator_diagnoser/test/diagnosis/apt_diagnoser_test.py
|
GaloisInc/adapt
|
2ccff778d3e77505899266572f8f7caacb5b630f
|
[
"BSD-3-Clause"
] | null | null | null |
legacy/dx/simulator/simulator_diagnoser/test/diagnosis/apt_diagnoser_test.py
|
GaloisInc/adapt
|
2ccff778d3e77505899266572f8f7caacb5b630f
|
[
"BSD-3-Clause"
] | 3
|
2019-09-20T20:49:54.000Z
|
2021-09-02T17:33:47.000Z
|
import unittest
from simulator_diagnoser.graph import InmemoryGraph
from simulator_diagnoser.diagnosis import APTDiagnoser
from simulator_diagnoser.matcher import Terminal, \
Sequence, \
StatelessMatcher
class APTDiagnoserTest(unittest.TestCase):
    """Exercises APTDiagnoser's forward/backward analysis on a small DAG."""

    def setUp(self):
        """Build the fixture graph, label it A/B/C, and wire up the diagnoser."""
        # Graph =
        #     9
        #   / | \
        #  6  7  8
        #   \/ \ /
        #   4   5
        #  / \ / \
        # 1   2   3
        self.g1 = InmemoryGraph()
        edges = [(1, 4), (2, 4), (2, 5), (3, 5),
                 (4, 6), (4, 7), (5, 7), (5, 8),
                 (6, 9), (7, 9), (8, 9)]
        for edge in edges:
            self.g1.add_edge(*edge)
        # Labels spell out an A -> B -> C pattern along some bottom-to-top paths.
        self.g1.add_node_label(1, 'A')
        self.g1.add_node_label(3, 'A')
        self.g1.add_node_label(7, 'B')
        self.g1.add_node_label(9, 'C')
        g1 = Sequence([Terminal('A'),
                       Terminal('B'),
                       Terminal('C')],
                      'sequence')
        self.matcher1 = StatelessMatcher(g1)
        self.apt_diagnoser = APTDiagnoser(self.g1, self.matcher1)

    def test_forward_analysis(self):
        # Node 9 is expected to be the top-ranked candidate.
        result = self.apt_diagnoser.forward_analysis()
        self.assertEqual(9, result.get_rank()[0][0])

    def test_backward_analysis(self):
        result = self.apt_diagnoser.forward_analysis()
        node = result.get_rank()[0][0]
        paths = self.apt_diagnoser.backward_analysis(node)
        # Two label-matching paths lead back from the top node.
        self.assertEqual(2, len(paths))

    def test_diagnoser(self):
        self.apt_diagnoser.store_diagnoses()
        self.assertEqual(2, len(self.g1.diagnoses))

if __name__ == '__main__':
    unittest.main()
| 31.054545
| 65
| 0.527518
| 192
| 1,708
| 4.505208
| 0.296875
| 0.055491
| 0.052023
| 0.060116
| 0.233526
| 0.157225
| 0.113295
| 0.113295
| 0
| 0
| 0
| 0.048171
| 0.343677
| 1,708
| 54
| 66
| 31.62963
| 0.723461
| 0.039813
| 0
| 0.054054
| 0
| 0
| 0.014102
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 1
| 0.108108
| false
| 0
| 0.108108
| 0
| 0.243243
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b650fbea222942a995d60c70b774dcb9b12f9c7
| 888
|
py
|
Python
|
etl/produce_map.py
|
hbmartin/sub9-client
|
0f85639412e2a2d695e9f6a56913dcb055af82b2
|
[
"CC-BY-4.0"
] | null | null | null |
etl/produce_map.py
|
hbmartin/sub9-client
|
0f85639412e2a2d695e9f6a56913dcb055af82b2
|
[
"CC-BY-4.0"
] | null | null | null |
etl/produce_map.py
|
hbmartin/sub9-client
|
0f85639412e2a2d695e9f6a56913dcb055af82b2
|
[
"CC-BY-4.0"
] | null | null | null |
#!/usr/bin/env python3
"""Roll up the geo6 json to produce a tree suitable for sub9 query"""
__author__ = "H. Martin"
__version__ = "0.1.0"

import json
import math
from random import randint

data = {}
transformed = {}

# Load the geohash-6 counter produced upstream.
with open('geohash_counter_6.json', encoding='utf-8') as data_file:
    data = json.loads(data_file.read())

# Build a geo4 -> geo5 -> geo6 tree; each level carries a 10%-scaled count.
for geo6 in data:
    count = math.ceil(data[geo6] * 0.1)
    geo4 = geo6[:4]
    geo5 = geo6[:5]
    level4 = transformed.setdefault(geo4, {"count": 0})
    level4["count"] += count
    level5 = level4.setdefault(geo5, {"count": 0})
    level5["count"] += count
    level5[geo6] = {'count': count}

with open('full_map.json', 'w', encoding='utf-8') as outfile:
    json.dump(transformed, outfile, indent=2)
| 24.666667
| 69
| 0.653153
| 127
| 888
| 4.440945
| 0.464567
| 0.159574
| 0.101064
| 0.049645
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047486
| 0.193694
| 888
| 36
| 70
| 24.666667
| 0.740223
| 0.095721
| 0
| 0
| 0
| 0
| 0.106516
| 0.027569
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.125
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b6659e81f23a3d9ae4d6dd42d1200115d52d82d
| 2,220
|
py
|
Python
|
src/routers/v1/endpoints/facetime.py
|
kal-byte/my-api
|
e1fccfa57cda874753d48411f7bfff06afbdb019
|
[
"MIT"
] | 2
|
2021-03-13T14:43:05.000Z
|
2021-03-13T15:10:58.000Z
|
src/routers/v1/endpoints/facetime.py
|
kal-byte/my-api
|
e1fccfa57cda874753d48411f7bfff06afbdb019
|
[
"MIT"
] | null | null | null |
src/routers/v1/endpoints/facetime.py
|
kal-byte/my-api
|
e1fccfa57cda874753d48411f7bfff06afbdb019
|
[
"MIT"
] | 2
|
2021-03-13T14:42:40.000Z
|
2021-03-17T18:18:10.000Z
|
"""
MIT License
Copyright 2021-Present kal-byte
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import utils
import polaroid
from io import BytesIO
from fastapi import APIRouter
from fastapi.responses import StreamingResponse, JSONResponse
router = APIRouter()
@utils.to_thread
def generate_image(first_image: bytes, second_image: bytes) -> BytesIO:
    """Backend code that generates the image that provides the facetime effect."""
    # Large image (1024x1024) is the background; small one (256x256) overlays it.
    img_one = polaroid.Image(first_image)
    img_two = polaroid.Image(second_image)
    img_one.resize(1024, 1024, 5)  # third arg presumably a sampling filter -- TODO confirm
    img_two.resize(256, 256, 5)
    facetime_btns = polaroid.Image("./static/facetimebuttons.png")
    facetime_btns.resize(1024, 1024, 5)
    # Composite the small image top-left and the button bar lower down.
    img_one.watermark(img_two, 15, 15)
    img_one.watermark(facetime_btns, 0, 390)
    buffer = BytesIO(img_one.save_bytes())
    return buffer
@router.get("/facetime")
async def serve_image(first: str, second: str):
    """Fetch both source images, propagate any fetch-error response, and
    stream the composed facetime image back as a PNG."""
    first_img = await utils.get_image(first)
    second_img = await utils.get_image(second)
    # utils.get_image signals failure by returning a JSONResponse directly.
    for candidate in (first_img, second_img):
        if isinstance(candidate, JSONResponse):
            return candidate
    image = await generate_image(first_img, second_img)
    return StreamingResponse(image, media_type="image/png")
| 34.153846
| 102
| 0.763964
| 321
| 2,220
| 5.211838
| 0.464174
| 0.0526
| 0.015541
| 0.017932
| 0.021518
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020022
| 0.167568
| 2,220
| 64
| 103
| 34.6875
| 0.885281
| 0.513964
| 0
| 0
| 0
| 0
| 0.043152
| 0.026266
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035714
| false
| 0
| 0.178571
| 0
| 0.357143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b6c6e386630575a00a0344243a155f4ff7c4cd5
| 979
|
py
|
Python
|
src/components/sidebar1.py
|
DishaDH123/Disha551Dashboard
|
3354ff94134e6094f7e5759f17abe5d988a32198
|
[
"MIT"
] | null | null | null |
src/components/sidebar1.py
|
DishaDH123/Disha551Dashboard
|
3354ff94134e6094f7e5759f17abe5d988a32198
|
[
"MIT"
] | null | null | null |
src/components/sidebar1.py
|
DishaDH123/Disha551Dashboard
|
3354ff94134e6094f7e5759f17abe5d988a32198
|
[
"MIT"
] | null | null | null |
import dash
from dash import dcc
from dash import html,Input, Output
import dash_bootstrap_components as dbc
from ..app import app
from .sidebar_function_tab1 import TAB1_DROPDOWN,time_scale,date_slider,choose_fun_tab1
# Static sidebar layout (list of dash components) for tab 1 of the dashboard.
SIDEBAR1 = [dbc.Row("Energy Dashboard",class_name="title",style={"font-size":"30px","padding-left": "10px","padding-top": "10px"}),
            dbc.Row("___________________________________________"),
            html.Br(),
            dbc.Row("This dashboard figure out which factors make a difference to house temperature and humidity. You can choose the factors from the dropdown below.",class_name="description"),
            html.Br(),
            dbc.Label("Compare Across:",class_name="sub_title",style={"font-size":"20px"}),
            html.Br(),
            dbc.Row(TAB1_DROPDOWN),
            html.Br(),
            # placeholder filled in by the output_div callback below
            dbc.Row(id="selection_tab1"),
            html.Br(),
            dbc.Row(time_scale),
            html.Br(),
            dbc.Row(date_slider),
            ]

@app.callback(Output("selection_tab1", "children"), Input("tab1_dropdown", "value"))
def output_div(value):
    """Render the tab-1 selection control that matches the chosen dropdown value."""
    return choose_fun_tab1(value)
| 34.964286
| 181
| 0.763023
| 144
| 979
| 4.736111
| 0.479167
| 0.061584
| 0.079179
| 0.087977
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018931
| 0.082737
| 979
| 27
| 182
| 36.259259
| 0.740535
| 0
| 0
| 0.25
| 0
| 0.041667
| 0.361593
| 0.043922
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0.25
| 0.041667
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b6d446a16c44d84b91f269a1e5ec023a880fe0e
| 4,176
|
py
|
Python
|
interbotix_examples/interbotix_moveit_interface/get_trajectory_server.py
|
junyaoshi/interbotix_ros_arms
|
ca37aa98a66acdbd17557d6808d6c32f0ad4799b
|
[
"BSD-2-Clause"
] | null | null | null |
interbotix_examples/interbotix_moveit_interface/get_trajectory_server.py
|
junyaoshi/interbotix_ros_arms
|
ca37aa98a66acdbd17557d6808d6c32f0ad4799b
|
[
"BSD-2-Clause"
] | null | null | null |
interbotix_examples/interbotix_moveit_interface/get_trajectory_server.py
|
junyaoshi/interbotix_ros_arms
|
ca37aa98a66acdbd17557d6808d6c32f0ad4799b
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python
from __future__ import print_function
from moveit_python_interface import MoveGroupPythonInteface
from geometry_msgs.msg import Pose
import rospy
from interbotix_moveit_interface.srv import GetTrajectory, GetTrajectoryResponse
# flags
GO_TO_START_JOINT_VALUES = False  # whether to use execute a plan to go to start joint values
EXECUTE_PLAN = False  # whether to execute the plan generated for reaching pose goal
ROBOT_NAME = 'wx200'  # name of the robot
DOF = 5  # robot's degrees of freedom

# mesh object path dictionary:
# maps the scene-object name used in requests to the STL mesh file on disk
MESH_OBJECT_FILENAMES = {"chair_bernhard_0146/left_leg": "furniture/env/models/assets/objects/chair_bernhard_0146/left_leg.stl",
                         "chair_bernhard_0146/right_leg": "furniture/env/models/assets/objects/chair_bernhard_0146/right_leg.stl",
                         "chair_bernhard_0146/seat": "furniture/env/models/assets/objects/chair_bernhard_0146/seat.stl",
                         "table_klubbo_0743/leg1": "furniture/env/models/assets/objects/table_klubbo_0743/leg1.stl",
                         "table_klubbo_0743/leg2": "furniture/env/models/assets/objects/table_klubbo_0743/leg2.stl",
                         "table_klubbo_0743/leg3": "furniture/env/models/assets/objects/table_klubbo_0743/leg3.stl",
                         "table_klubbo_0743/leg4": "furniture/env/models/assets/objects/table_klubbo_0743/leg4.stl",
                         "table_klubbo_0743/table_top": "furniture/env/models/assets/objects/table_klubbo_0743/table_top.stl"}
def generate_trajectory(req):
    """
    Handles a GetTrajectory service request.

    Synchronizes the planning scene's mesh objects with the objects listed in
    the request, optionally moves the arm to the start joint values, then
    plans a trajectory to the requested end-effector position.

    :param req: service request message (x/y/z target, start_joint_values,
        parallel lists object_names / object_poses / object_sizes)
    :return: GetTrajectoryResponse with the planned joint trajectory and a
        flag saying whether a valid trajectory was found
    """
    start = rospy.get_time()
    print("Generating joint trajectory for the following request... \n"
          "--- Position: [{} {} {}]\n"
          "--- Start joint values: {}\n"
          "--- Object names: {}".format(req.x, req.y, req.z, req.start_joint_values, req.object_names))

    # Add (or move) every requested mesh object in the planning scene.
    # zip() walks the three parallel request lists in lockstep instead of
    # indexing with range(len(...)).
    for object_name, object_pose, size_vec in zip(req.object_names, req.object_poses, req.object_sizes):
        object_size = (size_vec.x, size_vec.y, size_vec.z)
        # FIX: keep the try body minimal -- previously a KeyError raised
        # inside move_mesh/add_mesh would be misreported as an unknown name.
        try:
            object_filename = MESH_OBJECT_FILENAMES[object_name]
        except KeyError:
            print("Unrecognized object name: {}".format(object_name))
            continue
        if object_name in interface.scene.get_known_object_names():
            interface.move_mesh(object_name, object_pose)
        else:
            interface.add_mesh(object_name, object_pose, object_filename, object_size)

    # Remove objects present in the scene but absent from this request.
    # (FIX: loop variable renamed -- it shadowed the builtin `object`.)
    stale_names = set(interface.scene.get_known_object_names()) - set(req.object_names)
    for stale_name in stale_names:
        interface.remove_mesh(stale_name)

    if GO_TO_START_JOINT_VALUES:
        interface.go_to_joint_state(req.start_joint_values)

    target_position = [req.x, req.y, req.z]
    trajectory_exists, plan = interface.plan_ee_position(target_position,
                                                         req.start_joint_values,
                                                         execute_plan=EXECUTE_PLAN)
    end = rospy.get_time()
    print("Trajectory exists?: {} ".format(trajectory_exists))
    print("Trajectory sent to client!")
    print("Objects in scene after trajectory update: {}".format(interface.scene.get_known_object_names()))
    print("Processed request in {} seconds".format(end - start))
    return GetTrajectoryResponse(plan.joint_trajectory, trajectory_exists)
if __name__ == "__main__":
    # Bring up the MoveIt interface for the configured robot.
    interface = MoveGroupPythonInteface(robot_name=ROBOT_NAME, dof=DOF)
    # remove objects in moveit scene because of previous runs
    for name in interface.scene.get_known_object_names():
        interface.scene.remove_world_object(name)
    # Advertise the service and block, handing requests to generate_trajectory.
    s = rospy.Service('get_trajectory', GetTrajectory, generate_trajectory)
    print("Ready to generate trajectory.")
    rospy.spin()
| 48
| 130
| 0.677443
| 515
| 4,176
| 5.205825
| 0.264078
| 0.041029
| 0.055949
| 0.071615
| 0.331966
| 0.211115
| 0.177546
| 0.177546
| 0.073853
| 0
| 0
| 0.024876
| 0.229885
| 4,176
| 86
| 131
| 48.55814
| 0.808769
| 0.104167
| 0
| 0
| 0
| 0
| 0.283981
| 0.192017
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017241
| false
| 0
| 0.086207
| 0
| 0.12069
| 0.137931
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b7271707272e07905c19e8e64deead59d30237a
| 857
|
py
|
Python
|
data_utils/merge_dev_set.py
|
weihezhai/HatefulMemesChallenge
|
04f52643c0864d1efb6c0a9c674db42764f6834c
|
[
"MIT"
] | 63
|
2020-12-09T18:58:16.000Z
|
2022-03-21T02:34:35.000Z
|
data_utils/merge_dev_set.py
|
weihezhai/HatefulMemesChallenge
|
04f52643c0864d1efb6c0a9c674db42764f6834c
|
[
"MIT"
] | 5
|
2021-01-29T10:33:04.000Z
|
2021-08-25T14:15:27.000Z
|
data_utils/merge_dev_set.py
|
weihezhai/HatefulMemesChallenge
|
04f52643c0864d1efb6c0a9c674db42764f6834c
|
[
"MIT"
] | 18
|
2020-12-11T20:36:04.000Z
|
2021-12-12T07:04:20.000Z
|
import os
import json
import fire
def main(meme_dir):
    """Merge dev_seen.jsonl and dev_unseen.jsonl into dev_all.jsonl.

    Records are de-duplicated by their 'id' field; when an id appears in
    both files the dev_unseen entry wins (it is loaded last).
    """
    print("meme_dir: ", meme_dir)
    print("-" * 100)

    def _load(filename):
        # One annotation dict per JSONL line.
        annos = []
        with open(os.path.join(meme_dir, filename)) as fh:
            for line in fh:
                annos.append(json.loads(line))
        return annos

    id2val = {}
    for anno in _load("dev_seen.jsonl") + _load("dev_unseen.jsonl"):
        id2val[anno['id']] = anno

    out_path = os.path.join(meme_dir, "dev_all.jsonl")
    with open(out_path, 'w') as f:
        for anno_line in id2val.values():
            seri_line = json.dumps(anno_line)
            f.write(f"{seri_line}\n")
if __name__ == "__main__":
    # fire exposes main() as a CLI: python merge_dev_set.py --meme_dir=...
    fire.Fire(main)
| 25.205882
| 63
| 0.606768
| 137
| 857
| 3.49635
| 0.29927
| 0.087683
| 0.137787
| 0.075157
| 0.363257
| 0.363257
| 0.237996
| 0.237996
| 0.121086
| 0.121086
| 0
| 0.020472
| 0.259043
| 857
| 34
| 64
| 25.205882
| 0.733858
| 0
| 0
| 0.153846
| 0
| 0
| 0.09324
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0.115385
| 0
| 0.153846
| 0.076923
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b76216fa16aa9281bbd570ee1371176ada29324
| 4,442
|
py
|
Python
|
examples/cardless_credit_example.py
|
glendaesutanto/xendit-python
|
f9b131882ff7d045f2e2c6518933d1594efba3e6
|
[
"MIT"
] | 10
|
2020-10-31T23:34:34.000Z
|
2022-03-08T19:08:55.000Z
|
examples/cardless_credit_example.py
|
glendaesutanto/xendit-python
|
f9b131882ff7d045f2e2c6518933d1594efba3e6
|
[
"MIT"
] | 22
|
2020-07-30T14:25:07.000Z
|
2022-03-31T03:55:46.000Z
|
examples/cardless_credit_example.py
|
glendaesutanto/xendit-python
|
f9b131882ff7d045f2e2c6518933d1594efba3e6
|
[
"MIT"
] | 11
|
2020-07-28T08:09:40.000Z
|
2022-03-18T00:14:02.000Z
|
import time
from print_running_function import print_running_function

# Hackish method to import from another directory
# Useful while xendit-python isn't released yet to the public
import importlib.machinery

# Load the local xendit package straight from its source tree instead of
# site-packages.  NOTE(review): loader.load_module() is deprecated in favour
# of exec_module() -- consider migrating.
loader = importlib.machinery.SourceFileLoader("xendit", "../xendit/__init__.py")
xendit = loader.load_module("xendit")
class CreateCardlessCreditPayment:
    """Example wrapper around xendit's CardlessCredit.create_payment."""

    @staticmethod
    def run(xendit_instance, **kwargs):
        """Call create_payment with kwargs and print the result or API error."""
        try:
            cardless_credit_payment = xendit_instance.CardlessCredit.create_payment(
                **kwargs
            )
            print(cardless_credit_payment)
        except xendit.XenditError as e:
            print("Error status code:", e.status_code)
            print("Error message:", e)

    @staticmethod
    def example(xendit_instance):
        """Assemble a full sample payment request and run it."""
        cardless_credit_items = []
        cardless_credit_items.append(
            {
                "id": "item-123",
                "name": "Phone Case",
                "price": 200000,
                "type": "Smartphone",
                "url": "http://example.com/phone/phone_case",
                "quantity": 2,
            }
        )
        customer_details = {
            "first_name": "customer first name",
            "last_name": "customer last name",
            "email": "customer@email.com",
            "phone": "0812332145",
        }
        shipping_address = {
            "first_name": "first name",
            "last_name": "last name",
            "address": "Jl Teknologi No. 12",
            "city": "Jakarta",
            "postal_code": "12345",
            "phone": "081513114262",
            "country_code": "IDN",
        }
        args = {
            "cardless_credit_type": xendit.CardlessCreditType.KREDIVO,
            # unique external id derived from the current unix timestamp
            "external_id": f"id-{int(time.time())}",
            "amount": 10000,
            "payment_type": "3_months",
            "items": cardless_credit_items,
            "customer_details": customer_details,
            "shipping_address": shipping_address,
            "redirect_url": "https://my-shop.com/home",
            "callback_url": "https://my-shop.com/callback",
        }
        print_running_function("xendit.CardlessCredit.create_payment", args)
        CreateCardlessCreditPayment.run(xendit_instance, **args)
class CalculatePaymentType:
    """Example wrapper around xendit's CardlessCredit.calculate_payment_type."""

    @staticmethod
    def run(xendit_instance, **kwargs):
        """Call calculate_payment_type with kwargs and print the result or API error."""
        try:
            cardless_credit_payment_types = xendit_instance.CardlessCredit.calculate_payment_type(
                **kwargs
            )
            print(cardless_credit_payment_types)
        except xendit.XenditError as e:
            print("Error status code:", e.status_code)
            print("Error message:", e)

    @staticmethod
    def example(xendit_instance):
        """Assemble a sample item list and query the available payment types."""
        cardless_credit_items = []
        cardless_credit_items.append(
            {
                "id": "item-123",
                "name": "Phone Case",
                "price": 200000,
                "type": "Smartphone",
                "url": "http://example.com/phone/phone_case",
                "quantity": 2,
            }
        )
        args = {
            "cardless_credit_type": xendit.CardlessCreditType.KREDIVO,
            "amount": 10000,
            "items": cardless_credit_items,
        }
        print_running_function("xendit.CardlessCredit.calculate_payment_type", args)
        CalculatePaymentType.run(xendit_instance, **args)
def ask_cardless_credit_input():
    """Show the cardless-credit menu and return the selected action number.

    Keeps prompting until the user types something parseable as an int.
    """
    while True:
        print("Input the action that you want to use")
        print("0. Exit")
        print("1. Create Payment / Generate Checkout URL")
        print("2. Calculate Payment Types")
        try:
            return int(input())
        except ValueError:
            print("Invalid input. Please type a number")
def cardless_credit_example(xendit_instance):
    """Menu loop dispatching cardless-credit examples until the user exits (0)."""
    dispatch = {
        1: ("Running example of Create Payment / Generate Checkout URL",
            CreateCardlessCreditPayment.example),
        2: ("Running example of Calculate Payment Types",
            CalculatePaymentType.example),
    }
    choice = ask_cardless_credit_input()
    while choice != 0:
        if choice in dispatch:
            message, handler = dispatch[choice]
            print(message)
            handler(xendit_instance)
        choice = ask_cardless_credit_input()
| 35.822581
| 99
| 0.579244
| 420
| 4,442
| 5.885714
| 0.302381
| 0.124595
| 0.069175
| 0.046926
| 0.436893
| 0.338997
| 0.338997
| 0.296117
| 0.296117
| 0.296117
| 0
| 0.021833
| 0.319451
| 4,442
| 123
| 100
| 36.113821
| 0.795898
| 0.024088
| 0
| 0.416667
| 0
| 0
| 0.246852
| 0.028986
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.037037
| 0
| 0.12963
| 0.148148
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b79ce41c55d1514dd77ba2f1d765d1a882ab8ed
| 2,220
|
py
|
Python
|
tests/test_lines.py
|
phipleg/trafaret-config
|
0063a5027e7db015e0b0b0dafd8159df409121f2
|
[
"Apache-2.0",
"MIT"
] | 26
|
2016-12-07T16:40:44.000Z
|
2020-11-09T03:27:23.000Z
|
tests/test_lines.py
|
phipleg/trafaret-config
|
0063a5027e7db015e0b0b0dafd8159df409121f2
|
[
"Apache-2.0",
"MIT"
] | 6
|
2017-10-27T13:50:21.000Z
|
2020-09-24T17:08:48.000Z
|
tests/test_lines.py
|
phipleg/trafaret-config
|
0063a5027e7db015e0b0b0dafd8159df409121f2
|
[
"Apache-2.0",
"MIT"
] | 5
|
2016-12-05T10:23:29.000Z
|
2020-06-05T15:31:24.000Z
|
import unittest
from textwrap import dedent
import trafaret as T
from .util import get_err
class TestSMTP(unittest.TestCase):
    """Checks error reporting for int-conversion failures at nested keys."""

    TRAFARET = T.Dict({
        T.Key('port'): T.Int(),
        T.Key('smtp'): T.Dict({
            'server': T.String(),
            'port': T.Int(),
            'ssl_port': T.Int(),
        }),
    })

    def test_bad(self):
        # NOTE(review): in-string indentation was lost in extraction; the
        # reconstruction below is consistent with the config.yaml:N line
        # numbers in the expected output -- verify against upstream.
        self.assertEqual(get_err(self.TRAFARET, u"""\
smtp:
  server: mail.example.org
  port: unknown
  ssl_port: NaN
port: ???
"""),dedent("""\
config.yaml:3: smtp.port: value can't be converted to int
  -> 'unknown'
config.yaml:4: smtp.ssl_port: value can't be converted to int
  -> 'NaN'
config.yaml:5: port: value can't be converted to int
  -> '???'
"""))
class TestList(unittest.TestCase):
    """Checks list-element validation and the hosts[i] error path syntax."""

    # NOTE(review): the regexp is a non-raw string; \w and \d survive as-is
    # here but a raw string (r"\w+:\d+") would be safer.
    TRAFARET = T.Dict({
        "hosts": T.List(T.String() & T.Regexp("\w+:\d+")),
    })

    def test_ok(self):
        # NOTE(review): in-string indentation was lost in extraction;
        # reconstruction matches the config.yaml:N line numbers below.
        self.assertEqual(get_err(self.TRAFARET, u"""\
hosts:
- bear:8080
- cat:7070
"""), None)

    def test_err(self):
        self.assertEqual(get_err(self.TRAFARET, u"""\
hosts:
- bear:8080
- cat:x
"""),
            "config.yaml:3: hosts[1]: "
            "does not match pattern \\w+:\\d+\n"
        )
class TestInvalidYaml(unittest.TestCase):
    """Checks that YAML scanner errors are surfaced with file:line prefixes."""

    TRAFARET = T.Dict()

    def test_star(self):
        # NOTE(review): in-string indentation was lost in extraction;
        # reconstruction matches the config.yaml:3 references below.
        self.assertIn(get_err(self.TRAFARET, u"""\
port: 8080
host: localhost
*: 1
"""),{
            # message depends on whether we use libyaml (C speedups) or not
            dedent( # with C speedups
                "config.yaml:3: did not find expected alphabetic or "
                "numeric character\n"
                "config.yaml:3: while scanning an alias\n"
            ), dedent( # without C speedups
                "config.yaml:3: expected alphabetic or numeric character, "
                "but found ':'\n"
                "config.yaml:3: while scanning an alias\n"
            )})
| 27.75
| 75
| 0.476577
| 245
| 2,220
| 4.269388
| 0.371429
| 0.076482
| 0.063098
| 0.068834
| 0.494264
| 0.285851
| 0.285851
| 0.285851
| 0.166348
| 0.10325
| 0
| 0.019259
| 0.391892
| 2,220
| 79
| 76
| 28.101266
| 0.755556
| 0.043243
| 0
| 0.203125
| 0
| 0
| 0.483491
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 1
| 0.0625
| false
| 0
| 0.0625
| 0
| 0.21875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b7a95fc7a817c6bbe96dbb60f85ab236a523c5a
| 919
|
py
|
Python
|
django_requests_logger/admin.py
|
mabouchama/django-requests-logger
|
d28bae0312878cb09f355e8793e56b6a7d1e6f72
|
[
"MIT"
] | 1
|
2019-02-20T09:08:15.000Z
|
2019-02-20T09:08:15.000Z
|
django_requests_logger/admin.py
|
mabouchama/django-requests-logger
|
d28bae0312878cb09f355e8793e56b6a7d1e6f72
|
[
"MIT"
] | 4
|
2022-02-15T12:43:15.000Z
|
2022-02-22T13:23:10.000Z
|
django_requests_logger/admin.py
|
mabouchama/django-requests-logger
|
d28bae0312878cb09f355e8793e56b6a7d1e6f72
|
[
"MIT"
] | 2
|
2022-02-10T06:36:27.000Z
|
2022-02-16T06:58:16.000Z
|
from django.contrib import admin
from django.utils.translation import gettext_lazy as _
from django_requests_logger.models import RequestLog
class RequestLogAdmin(admin.ModelAdmin):
    """Django admin configuration for RequestLog entries.

    Shows the essentials in the changelist and groups request/response
    payloads into collapsible fieldsets; everything is read-only.
    """

    list_display = (
        'url', 'method', 'response_status_code', 'created'
    )
    list_filter = ('method', 'response_status_code')
    search_fields = ['url']
    fieldsets = (
        (None, {
            'fields': (
                'url', 'method', 'response_status_code', 'created')
        }),
        (_('Request'), {
            'classes': ('collapse',),
            'fields': (
                'params', 'body', 'headers')
        }),
        (_('Response'), {
            'classes': ('collapse',),
            'fields': (
                'response_content', 'response_headers')
        })
    )
    # Every field across all fieldsets is read-only (logs are immutable).
    readonly_fields = list(d for t in fieldsets for d in t[1]['fields'])

admin.site.register(RequestLog, RequestLogAdmin)
| 28.71875
| 72
| 0.558215
| 84
| 919
| 5.892857
| 0.535714
| 0.060606
| 0.121212
| 0.145455
| 0.137374
| 0.137374
| 0
| 0
| 0
| 0
| 0
| 0.001546
| 0.295974
| 919
| 31
| 73
| 29.645161
| 0.763524
| 0
| 0
| 0.259259
| 0
| 0
| 0.238303
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b7cb7e95cec056b67de9696402880e28ea63ad1
| 5,662
|
py
|
Python
|
rubybot/cogs/memes.py
|
lakepower/Ruby-Bot
|
332d3f8bc9c8ba2902f67b59e360596778483473
|
[
"MIT"
] | null | null | null |
rubybot/cogs/memes.py
|
lakepower/Ruby-Bot
|
332d3f8bc9c8ba2902f67b59e360596778483473
|
[
"MIT"
] | null | null | null |
rubybot/cogs/memes.py
|
lakepower/Ruby-Bot
|
332d3f8bc9c8ba2902f67b59e360596778483473
|
[
"MIT"
] | null | null | null |
import requests
import bs4
import re
import random
import codecs
import os
from discord.ext import commands
from discord.ext.commands import BucketType
from cogs.utils import checks
# Data files live under files/ relative to the process working directory.
path_qaz = os.path.join(os.getcwd(), 'files', 'qaz.txt')
path_riko = os.path.join(os.getcwd(), 'files', 'riko_meme')

# Riko meme filenames that must never be picked at random.
nsfw = [
    "CngcndGUEAAZh8q.jpg", "Cnsqi9DUsAAtJG_.jpg", "Cnh5oJHUEAAICHv.jpg"
]
class Memes:
def __init__(self, bot):
self.bot = bot
self.init_qaz()
self.init_riko()
def init_qaz(self):
self.filename = path_qaz
self.qaz_file = open(self.filename, 'r')
self.qaz_list = dict()
for line in self.qaz_file:
data = line.split()
qaz_post = ''
qaz_tag = ''
for block in data:
if block.startswith('http'):
qaz_post += block + ' '
else:
qaz_tag += block + ' '
self.qaz_list[qaz_tag.strip()] = qaz_post.strip()
self.qaz_file.close()
self.qaz_file = open(self.filename, 'a')
def init_riko(self):
self.riko_memes = os.listdir(path_riko)
@commands.command(name='tsun', no_pm=True)
@commands.cooldown(1, 5, BucketType.user)
async def tsun(self, *, index: str=''):
"""
Displays a post from tsuntsunlive's Instagram.
Original code taken from Chezz at https://github.com/chesszz/MemeBot/blob/master/MemeBot.py
:param index: If an index is specified, the specific post on the front page is retrieved. If not, a random post is grabbed.
"""
msgs = self.get_tsun(index)
for msg in msgs:
await self.bot.say(msg)
def get_tsun(self, index: str):
r = requests.get('https://www.instagram.com/tsuntsunlive/')
html = r.content
soup = bs4.BeautifulSoup(html, 'html.parser')
tag_list = soup.find_all("script", type="text/javascript")
tag_list = [str(tag) for tag in tag_list]
tag_list = sorted(tag_list, key=len)
data_tag = tag_list[-1]
after = index.split()
try:
index = int(after[0])
except ValueError:
index = None
except IndexError:
index = None
post_list = re.split('"caption": "', data_tag)[1:]
if index is None:
post = random.choice(post_list)
else:
post = post_list[index - 1]
caption = post[:re.search('", "likes"', post).start()]
caption = re.sub(r"(\\u[0-9a-f]{4})", lambda match: codecs.decode(match.group(1), "unicode_escape"), caption)
caption = re.sub(r"\\n", "\n", caption)
img_part = post[re.search('"display_src": "', post).end():]
img = img_part[:re.search("\?", img_part).start()]
img = re.sub(r"\\", "", img)
data = [img, caption]
return data
@commands.group(pass_context=True, no_pm=True)
@commands.cooldown(1, 5, BucketType.user)
async def qaz(self, ctx):
"""
Lets you save dank qaz quotes.
If a subcommand is not called, a random qaz quote is displayed.
"""
if ctx.invoked_subcommand is None:
tags = list(self.qaz_list.keys())
await self.bot.say(self.qaz_list[tags[random.randrange(0, len(tags))]])
@qaz.command(pass_context=True, no_pm=True)
async def add(self, ctx, name, *, link):
"""
Adds a qaz quote to the database.
:param name: Name of the qaz quote
:param link: Content of the qaz quote
"""
tag = name
if tag not in list(self.qaz_list.keys()):
print(self.qaz_file.write('\n' + link + ' ' + tag))
self.qaz_file.flush()
self.qaz_list[tag] = link
await self.bot.say('Tag added successfully!')
else:
await self.bot.say('That tag already exists!')
@qaz.command(name='list', pass_context=True, no_pm=True)
async def _list(self, ctx):
"""
Displays a list of all saved qaz quotes.
"""
await self.bot.say(list(self.qaz_list.keys()))
@commands.group(name='riko', pass_context=True, invoke_without_command=True)
@commands.cooldown(1, 5, BucketType.user)
async def riko(self, ctx, filename=""):
    """
    Uploads a Riko meme.
    Chooses a random meme if filename is not specified.
    :param filename: Filename of the meme.
    """
    # Guard clause: a subcommand handles its own reply.
    if ctx.invoked_subcommand is not None:
        return
    await self.upload_riko(filename)
@riko.command(name='list')
async def r_list(self):
    """Report how many Riko memes are indexed."""
    count = len(self.riko_memes)
    await self.bot.say('There are currently {} Riko memes!'.format(count))
@riko.command(name='refresh', pass_context=True)
@checks.is_owner()
async def r_refresh(self, ctx):
    # Owner-only: rebuild the Riko meme index from disk, then confirm.
    self.init_riko()
    await self.bot.say('Database refreshed!')
async def upload_riko(self, filename):
    """Upload the named Riko meme, or a random non-blacklisted one.

    :param filename: meme file name; when empty or not in the index a
        random pick (skipping the ``nsfw`` blacklist) is used instead.
    """
    # Idiom fix: `filename not in` (was `not filename in`); random.choice
    # replaces manual randrange indexing.
    if filename not in self.riko_memes:
        filename = random.choice(self.riko_memes)
        # NOTE(review): loops forever if every meme is blacklisted in
        # ``nsfw`` — fine for the current data set, but worth a guard.
        while filename in nsfw:
            filename = random.choice(self.riko_memes)
    await self.bot.upload(fp=os.path.join(path_riko, filename), content=filename)
    print('uploaded riko meme')
@riko.command()
@checks.is_owner()
async def blacklist(self, ctx, filename: str):
    # Owner-only: mark a meme filename NSFW so upload_riko skips it.
    # NOTE(review): mutates the module-level ``nsfw`` list; appears to be
    # in-memory only (not persisted) — confirm against module setup code.
    nsfw.append(filename)
def setup(bot):
    # discord.py extension entry point: register the Memes cog on load.
    bot.add_cog(Memes(bot))
| 33.502959
| 132
| 0.575062
| 733
| 5,662
| 4.33015
| 0.271487
| 0.02867
| 0.030246
| 0.033081
| 0.182105
| 0.150914
| 0.097038
| 0.097038
| 0.077505
| 0.063642
| 0
| 0.005543
| 0.299011
| 5,662
| 168
| 133
| 33.702381
| 0.794155
| 0
| 0
| 0.137931
| 0
| 0
| 0.081403
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043103
| false
| 0.043103
| 0.077586
| 0
| 0.137931
| 0.017241
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b7e0fc01325c4db573e5b390f7ba040016a6e45
| 11,092
|
py
|
Python
|
tooldb.py
|
SolidifiedRay/layout-web-tool
|
cefba938c1e03ea8da206ec7fb0c6d8a62f1fc1b
|
[
"MIT"
] | 1
|
2020-04-01T15:05:54.000Z
|
2020-04-01T15:05:54.000Z
|
tooldb.py
|
SolidifiedRay/layout-web-tool
|
cefba938c1e03ea8da206ec7fb0c6d8a62f1fc1b
|
[
"MIT"
] | 42
|
2017-05-23T17:19:19.000Z
|
2021-04-26T12:28:47.000Z
|
tooldb.py
|
SolidifiedRay/layout-web-tool
|
cefba938c1e03ea8da206ec7fb0c6d8a62f1fc1b
|
[
"MIT"
] | 7
|
2017-05-04T02:13:07.000Z
|
2020-07-09T10:56:03.000Z
|
"""
<Program Name>
tooldb.py
<Author>
Lukas Puehringer <lukas.puehringer@nyu.edu>
<Started>
June 12, 2017
<Copyright>
See LICENSE for licensing information.
<Purpose>
A basic collection of software supply chain tools in four categories
- vcs (version control systems)
- building
- qa (quality assurance)
- package (packaging)
The tools are presented to the user on the different pages of the web wizard
as options to choose from to define a custom supply chain
TODO:
- Update! Some of the tools might be not used at all while other popular
tools are missing
- Clean up! Common commands, logo, ...
"""
# Tool metadata for the web wizard, keyed by supply-chain step. Each entry
# carries a display name, a logo URL and an example command; "type" and
# "prog_lang" are informational filters and are often left blank.
#
# Fixes vs. the previous revision:
#   - removed the duplicate "Check" entry at the end of the "qa" list
#   - deleted the large blocks of commented-out tool entries (dead data)
COLLECTION = {
    "vcs": [{
        "type": "",
        "prog_lang": "",
        "name": "Git",
        "logo": "https://git-scm.com/images/logos/downloads/Git-Icon-1788C.png",
        "cmd": "git clone <repo>"
    }, {
        "type": "",
        "prog_lang": "",
        "name": "SVN",
        "logo": "https://subversion.apache.org/images/svn-square.jpg",
        "cmd": "svn checkout <repo>"
    }, {
        "type": "",
        "prog_lang": "",
        "name": "Mercurial",
        "logo": "https://www.mercurial-scm.org/logo-droplets-200.png",
        "cmd": "hg clone <repo>"
    }, {
        "type": "",
        "prog_lang": "",
        # NOTE(review): trailing space in the name kept as-is in case
        # anything keys on the exact string.
        "name": "GNU Bazaar ",
        "logo": "http://bazaar.canonical.com/bzricons/bazaar-logo.png",
        "cmd": "bzr branch <remote repo> <local repo>"
    }, {
        "type": "",
        "prog_lang": "",
        "name": "Monotone",
        "logo": "https://www.monotone.ca/res/logo.png",
        "cmd": "mtn --db=<db> sync <repo>"
    }, {
        "type": "",
        "prog_lang": "",
        "name": "Fossil",
        "logo": "https://www.fossil-scm.org/index.html/doc/trunk/www/fossil3.gif",
        "cmd": "fossil clone <repo>"
    }, {
        "type": "",
        "prog_lang": "",
        "name": "Darcs",
        "logo": "http://darcs.net/img/logos/logo.png",
        "cmd": "darcs clone <repo>"
    }, {
        "type": "",
        "prog_lang": "",
        "name": "CVS",
        "logo": "",
        "cmd": "cvs co <repo>"
    }, {
        "type": "",
        "prog_lang": "",
        "name": "BitKeeper",
        "logo": "http://www.bitkeeper.org/man/BitKeeper_SN_Blue.png",
        "cmd": "bk clone <repo>"
    }],
    "building": [{
        "type": "make_based",
        "prog_lang": "",
        "logo": "https://upload.wikimedia.org/wikipedia/commons/thumb/3/35/Tux.svg/150px-Tux.svg.png",
        "name": "make",
        "cmd": "make [-einpqrst] [-f makefile]... [-k|-S] [macro=value...] [target_name...]"
    }, {
        "type": "build_script_generation",
        "prog_lang": "",
        "logo": "",
        "name": "configure",
        "cmd": "./configure"
    }, {
        "type": "non_make_based",
        "prog_lang": "Java, C, C++",
        "logo": "http://antinstaller.sourceforge.net/manual/images/ant_logo_large.gif",
        "name": "Apache ANT",
        "cmd": "install, init, all"
    }, {
        "type": "non_make_based",
        "prog_lang": "",
        "logo": "http://www.scala-sbt.org/assets/typesafe_sbt_svg.svg",
        "name": "sbt",
        "cmd": "compile"
    }, {
        "type": "non_make_based",
        "prog_lang": "MS Visual Studio, C++",
        "logo": "http://www.eitcafe.com/wp-content/uploads/2016/07/msbuild.jpg",
        "name": "MS Build",
        "cmd": "MSBuild.exe MyProj.proj /property:Configuration=Debug , cl /EHsc hello.cpp"
    }, {
        "type": "non_make_based",
        "prog_lang": "Ruby",
        "logo": "",
        "name": "Rake",
        "cmd": "rake"
    }, {
        "type": "",
        "prog_lang": "XML(Project Object Model)",
        "logo": "https://maven.apache.org/images/maven-logo-black-on-white.png",
        "name": "Maven",
        "cmd": "mvn compile"
    }, {
        "type": "non_make_based",
        "prog_lang": "Python",
        "logo": "http://scons.org/images/SCons.png",
        "name": "Scons",
        "cmd": "scons foo.out"
    }],
    "qa": [{
        "type": "Unit Testing",
        "prog_lang": "C",
        "name": "Check",
        "logo": "",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "C",
        "name": "AceUnit",
        "logo": "",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "C#",
        "name": "csUnit",
        "logo": "",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "C#",
        "name": "Visual Studio",
        "logo": "http://www.qatestingtools.com/sites/default/files/tools_shortcuts/Visual%20Studio%20Unite%20Testing%20Framework%20150.png",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "C++",
        "name": "Parasoft",
        "logo": "https://discoversdkcdn.azureedge.net/runtimecontent/companyfiles/5703/1899/thumbnail.png",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "C++",
        "name": "CppUnit",
        "logo": "http://www.howcsharp.com/img/0/9/cppunit-300x225.jpg",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "Haskell",
        "name": "HUnit",
        "logo": "",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "Haskell",
        "name": "QuickCheck",
        "logo": "",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "Java",
        "name": "JUnit",
        "logo": "http://www.swtestacademy.com/wp-content/uploads/2015/11/Junit_Logo.png",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "Java",
        "name": "Jtest",
        "logo": "http://www.bj-zhzt.com/datacache/pic/390_260_b91e82190f00f190d60d78bf53f6352b.png",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "JavaScript",
        "name": "Jasmine",
        "logo": "https://upload.wikimedia.org/wikipedia/en/thumb/2/22/Logo_jasmine.svg/1028px-Logo_jasmine.svg.png",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "JavaScript",
        "name": "Unit.js",
        "logo": "https://upload.wikimedia.org/wikipedia/en/e/ec/Unit_JS_logo.png",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "PHP",
        "name": "SimpleTest",
        "logo": "http://www.simpletest.org/images/simpletest-logo.png",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "PHP",
        "name": "PHPUnit",
        "logo": "http://4.bp.blogspot.com/-xrvHPUBqc7Y/Ucxe5ZYDVYI/AAAAAAAAAVE/cXtFm0NcE9A/s500/logo.png",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "Python",
        "name": "unittest",
        "logo": "",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "Python",
        "name": "doctest",
        "logo": "",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "R",
        "name": "RUnit",
        "logo": "",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "R",
        "name": "testthat",
        "logo": "https://d21ii91i3y6o6h.cloudfront.net/gallery_images/from_proof/13597/small/1466619792/rstudio-hex-testthat.png",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "Scala",
        "name": "ScalaTest",
        "logo": "http://media.tumblr.com/ec3c87095fe8a21216c516606afffdcc/tumblr_inline_mtskzebUcv1s17bu5.jpg",
        "cmd": ""
    }, {
        "type": "Unit Testing",
        "prog_lang": "Scala",
        "name": "ScUnit",
        "logo": "",
        "cmd": ""
    }, {
        "type": "Continuous Integration",
        "prog_lang": "all",
        "name": "Travis CI",
        "logo": "https://cdn.travis-ci.com/images/logos/TravisCI-Mascot-1-20feeadb48fc2492ba741d89cb5a5c8a.png",
        "cmd": ""
    }, {
        "type": "Continuous Integration",
        "prog_lang": "Java",
        "name": "Jenkins",
        "logo": "https://www.cloudbees.com/sites/default/files/Jenkins_8.png",
        "cmd": ""
    }, {
        "type": "Continuous Integration",
        "prog_lang": "Java",
        "name": "TeamCity",
        "logo": "http://workingwithdevs.com/wp-content/uploads/2014/05/TeamCity-logo.png",
        "cmd": ""
    }, {
        "type": "Continuous Integration",
        "prog_lang": "all",
        "name": "Bamboo",
        "logo": "https://www.vectorcast.com/sites/default/themes/vectorsoftware/images/Bamboo-logo_clipped.png",
        "cmd": ""
    }, {
        "type": "Continuous Integration",
        "prog_lang": "all",
        "name": "Codeship",
        "logo": "http://rixrix.github.io/ci-talk-codeship/images/logo_codeship_colour.png",
        "cmd": ""
    }, {
        "type": "Continuous Integration",
        "prog_lang": "all",
        "name": "CircleCI",
        "logo": "https://circleci.com/circleci-logo-stacked-fb.png",
        "cmd": ""
    }, {
        "type": "Continuous Integration",
        "prog_lang": "all",
        "name": "Buildbot",
        "logo": "https://buildbot.net/img/nut.png",
        "cmd": ""
    }, {
        "type": "Linting",
        "prog_lang": "Python",
        "name": "Pylint",
        "logo": "https://carlchenet.com/wp-content/uploads/2013/08/pylint-logo.png",
        "cmd": ""
    }, {
        "type": "Linting",
        "prog_lang": "Python",
        "name": "Flake8",
        "logo": "",
        "cmd": ""
    }, {
        "type": "Linting",
        "prog_lang": "Python",
        "name": "PyChecker",
        "logo": "",
        "cmd": ""
    }, {
        "type": "Linting",
        "prog_lang": "Java",
        "name": "Checkstyle",
        "logo": "http://checkstyle.sourceforge.net/images/header-checkstyle-logo.png",
        "cmd": ""
    }, {
        "type": "Linting",
        "prog_lang": "Java",
        "name": "Coverity",
        "logo": "https://2015.appsecusa.org/c/wp-content/uploads/2014/07/coveritylogo.png",
        "cmd": ""
    }, {
        "type": "Linting",
        "prog_lang": "C/C++",
        "name": "Lint",
        "logo": "",
        "cmd": ""
    }, {
        "type": "Linting",
        "prog_lang": "C/C++",
        "name": "Cppcheck",
        "logo": "https://cdn.portableapps.com/CppcheckPortable_128.png",
        "cmd": ""
    }],
    "package": [{
        "type": "",
        "prog_lang": "",
        "logo": "",
        "name": "Tar",
        "cmd": "tar -cvf afiles.tar file1 file2 file3"
    }, {
        "type": "",
        "prog_lang": "",
        "logo": "",
        "name": "Zip",
        "cmd": ""
    }, {
        "type": "",
        "prog_lang": "",
        "logo": "",
        "name": "Wheel",
        "cmd": "python setup.py bdist_wheel"
    }, {
        "type": "",
        "prog_lang": "",
        "logo": "",
        "name": "MSI",
        "cmd": ""
    }]
}
| 26.857143
| 136
| 0.542102
| 1,214
| 11,092
| 4.865733
| 0.323723
| 0.083968
| 0.035551
| 0.067547
| 0.357542
| 0.3269
| 0.284916
| 0.208735
| 0.129677
| 0.027087
| 0
| 0.034737
| 0.218806
| 11,092
| 413
| 137
| 26.857143
| 0.646971
| 0.163
| 0
| 0.640351
| 0
| 0.046784
| 0.580041
| 0.005628
| 0
| 0
| 0
| 0.002421
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b7e3f441634f0a952e1cc4ac8faf39f1d5c0e8a
| 3,587
|
py
|
Python
|
flask_tutorial/marshmallow_demo/nesting_schemas.py
|
ftconan/python3
|
eb63ba33960072f792ecce6db809866b38c402f8
|
[
"MIT"
] | 1
|
2018-12-19T22:07:56.000Z
|
2018-12-19T22:07:56.000Z
|
marshmallow_demo/nesting_schemas.py
|
ftconan/flask-tutorial
|
d5164c93b5e6a6e3d2b8980e4b846adb7cb21aee
|
[
"MIT"
] | 12
|
2020-03-14T05:32:26.000Z
|
2022-03-12T00:08:49.000Z
|
marshmallow_demo/nesting_schemas.py
|
ftconan/flask-tutorial
|
d5164c93b5e6a6e3d2b8980e4b846adb7cb21aee
|
[
"MIT"
] | 1
|
2018-12-19T22:08:00.000Z
|
2018-12-19T22:08:00.000Z
|
# coding=utf-8
"""
@author: magician
@date: 2018/9/13
"""
import datetime as dt
from marshmallow import Schema, fields, pprint
class User(object):
    """A user record: name, email, creation timestamp, friends, employer."""

    def __init__(self, name, email):
        # Identity fields supplied by the caller.
        self.name = name
        self.email = email
        # Relationship fields start empty; main() populates them later.
        self.friends = []
        self.employer = None
        # Timestamp of object creation (local time).
        self.created_at = dt.datetime.now()
class Blog(object):
    """A blog post with a title and an author (a User instance)."""

    def __init__(self, title, author):
        self.title = title
        self.author = author
class UserSchema(Schema):
    """
    user schema
    """
    name = fields.String()
    email = fields.Email()
    # created_at = fields.DateTime()
    # friends = fields.Nested('self', only='name', many=True)
    # Self-referential nesting: each friend serialized with this same schema.
    friends = fields.Nested('self', many=True)
    # use the 'exclude' argument to avoid infinite recursion
    employer = fields.Nested('self', exclude=('employer', ), default=None)
class BlogSchema(Schema):
    """
    blog schema
    """
    title = fields.String()
    # nested field: the author is serialized with the full UserSchema
    author = fields.Nested(UserSchema)
class BlogSchema2(Schema):
    """
    blog schema2
    specifying which fields to nest
    """
    title = fields.String()
    # nested field restricted to the author's email only
    author = fields.Nested(UserSchema, only=['email'])
class SiteSchema(Schema):
    """
    nest using dot delimiters
    (e.g. SiteSchema(only=['blog.author.email']))
    """
    blog = fields.Nested(BlogSchema2)
class AuthorSchema(Schema):
    """
    Make sure to use the 'only' or 'exclude' params
    to avoid infinite recursion
    """
    # Two-way nesting with BookSchema; excluding 'author' breaks the cycle.
    books = fields.Nested('BookSchema', many=True, exclude=('author', ))

    class Meta:
        # Explicit whitelist of fields to serialize.
        fields = ('id', 'name', 'books')
class BookSchema(Schema):
    """Book schema, nested two-way with AuthorSchema."""
    # Back-reference limited to id/name to avoid infinite recursion.
    author = fields.Nested(AuthorSchema, only=('id', 'name'))

    class Meta:
        fields = ('id', 'title', 'author')
if __name__ == '__main__':
    # Demo 1: serialize a blog with its fully nested author.
    user = User(name='Monty', email='monty@python.org')
    blog = Blog(title='Something Completely Different', author=user)
    result = BlogSchema().dump(blog)
    pprint(result)

    # Demo 2: specifying which fields to nest (author email only).
    schema = BlogSchema2()
    result = schema.dump(blog)
    pprint(result)

    # Demo 3: nest using dot delimiters on a plain dict.
    schema = SiteSchema(only=['blog.author.email'])
    site = {
        'blog': {
            'author': {'email': u'monty@python.org'}
        }
    }
    result = schema.dump(site)
    pprint(result)

    # only value deserialized
    # new_user = {
    #     'name': 'Steve',
    #     'email': 'steve@example.com',
    #     'friends': ['Mike', 'Joe']
    # }
    # serialized_data = UserSchema().dump(new_user)
    # pprint(serialized_data)
    # deserialized_data = UserSchema().load(result)
    # pprint(deserialized_data)

    # two-way nesting
    # author = Author(name='William Faulkner')
    # book = Book(title='As I Lay Dying', author=author)
    # book_result = BookSchema().dump(book)
    # pprint(book_result, indent=2)
    # {
    #     "id": 124,
    #     "title": "As I Lay Dying",
    #     "author": {
    #         "id": 8,
    #         "name": "William Faulkner"
    #     }
    # }
    # author_result = AuthorSchema().dump(author)
    # pprint(author_result, indent=2)
    # {
    #     "id": 8,
    #     "name": "William Faulkner",
    #     "books": [
    #         {
    #             "id": 124,
    #             "title": "As I Lay Dying"
    #         }
    #     ]
    # }

    # Demo 4: nesting a schema within itself (friends/employer).
    user = User('Steve', 'steve@example.com')
    user.friends.append(User('Mike', 'mike@example.com'))
    user.friends.append(User('Joe', 'joe@example.com'))
    user.employer = User('Dirk', 'dirk@example.com')
    result = UserSchema().dump(user)
    pprint(result, indent=2)
| 23.444444
| 74
| 0.583496
| 394
| 3,587
| 5.241117
| 0.27665
| 0.046489
| 0.023245
| 0.015981
| 0.165617
| 0.11816
| 0.074576
| 0.054237
| 0.054237
| 0
| 0
| 0.008709
| 0.26373
| 3,587
| 152
| 75
| 23.598684
| 0.773192
| 0.343184
| 0
| 0.125
| 0
| 0
| 0.115088
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035714
| false
| 0
| 0.035714
| 0
| 0.446429
| 0.089286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b7ec91f09b0e08306d5eb9d01d89bc063dc7870
| 3,906
|
py
|
Python
|
integration_tests/util/generate_scene.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | 1,577
|
2017-05-22T14:22:00.000Z
|
2022-03-31T02:03:25.000Z
|
integration_tests/util/generate_scene.py
|
carderne/raster-vision
|
915fbcd3263d8f2193e65c2cd0eb53e050a47a01
|
[
"Apache-2.0"
] | 933
|
2017-05-09T20:25:02.000Z
|
2022-03-31T10:22:59.000Z
|
integration_tests/util/generate_scene.py
|
monocilindro/raster-vision
|
fc181a6f31f085affa1ee12f0204bdbc5a6bf85a
|
[
"Apache-2.0"
] | 336
|
2017-05-15T16:30:44.000Z
|
2022-03-28T06:19:33.000Z
|
import random
import click
import numpy as np
import rasterio
from rasterio.transform import from_origin
from rastervision.core.box import Box
from rastervision.data import (RasterioCRSTransformer, ObjectDetectionLabels,
ObjectDetectionGeoJSONStore)
from rastervision.core.class_map import (ClassItem, ClassMap)
@click.command()
@click.option(
    '--task',
    '-t',
    type=click.Choice(['object_detection', 'semantic_segmentation']),
    required=True)
@click.option('--chip_size', '-c', default=300, type=int)
@click.option('--chips_per_dimension', '-s', default=3, type=int)
@click.argument('tiff_path')
@click.argument('labels_path')
def generate_scene(task, tiff_path, labels_path, chip_size,
                   chips_per_dimension):
    """Generate a synthetic object detection scene.

    Randomly generates a GeoTIFF with red and green boxes denoting two
    classes and a corresponding label file. This is useful for generating
    synthetic scenes for testing purposes.
    """
    class_map = ClassMap([ClassItem(1, 'car'), ClassItem(2, 'building')])

    # Extent is an exact multiple of chip_size so the window grid tiles it.
    # (Removed the no-op `chip_size = chip_size` self-assignment.)
    ymax = chip_size * chips_per_dimension
    xmax = chip_size * chips_per_dimension
    extent = Box(0, 0, ymax, xmax)

    # Make windows along a regular grid.
    windows = extent.get_windows(chip_size, chip_size)

    # For each window, make some random boxes within it and render to image.
    nb_channels = 3
    image = np.zeros((ymax, xmax, nb_channels)).astype(np.uint8)
    boxes = []
    class_ids = []
    for window in windows:
        # Leave some windows blank (~30% chance).
        if random.uniform(0, 1) > 0.3:
            # Pick a random class.
            class_id = random.randint(1, 2)
            box = window.make_random_square(50).to_int()
            boxes.append(box)
            class_ids.append(class_id)
            # Channel index (class_id - 1): class 1 -> red, class 2 -> green.
            image[box.ymin:box.ymax, box.xmin:box.xmax, class_id - 1] = 255

    # Save image as a GeoTIFF centered on Philadelphia.
    transform = from_origin(-75.163506, 39.952536, 0.000001, 0.000001)
    print('Generated {} boxes with {} different classes.'.format(
        len(boxes), len(set(class_ids))))
    with rasterio.open(
            tiff_path,
            'w',
            driver='GTiff',
            height=ymax,
            transform=transform,
            crs='EPSG:4326',
            compression=rasterio.enums.Compression.none,
            width=xmax,
            count=nb_channels,
            dtype='uint8') as dst:
        for channel_ind in range(0, nb_channels):
            # Rasterio bands are 1-indexed.
            dst.write(image[:, :, channel_ind], channel_ind + 1)

    if task == 'object_detection':
        # Make OD labels from the generated boxes.
        npboxes = Box.to_npboxes(boxes)
        class_ids = np.array(class_ids)
        labels = ObjectDetectionLabels(npboxes, class_ids)

        # Save labels to GeoJSON using the image's CRS.
        with rasterio.open(tiff_path) as image_dataset:
            crs_transformer = RasterioCRSTransformer(image_dataset)
            od_file = ObjectDetectionGeoJSONStore(labels_path, crs_transformer,
                                                  class_map)
            od_file.save(labels)
    elif task == 'semantic_segmentation':
        label_image = np.zeros((ymax, xmax, 1)).astype(np.uint8)
        for box, class_id in zip(boxes, class_ids):
            label_image[box.ymin:box.ymax, box.xmin:box.xmax, 0] = class_id

        # Save labels to a single-band raster.
        with rasterio.open(
                labels_path,
                'w',
                driver='GTiff',
                height=ymax,
                transform=transform,
                crs='EPSG:4326',
                compression=rasterio.enums.Compression.none,
                width=xmax,
                count=1,
                dtype='uint8') as dst:
            dst.write(label_image[:, :, 0], 1)
if __name__ == '__main__':
    # Click parses CLI options/arguments and invokes the command.
    generate_scene()
| 33.965217
| 79
| 0.616999
| 470
| 3,906
| 4.96383
| 0.346809
| 0.030862
| 0.029147
| 0.020574
| 0.189027
| 0.120874
| 0.120874
| 0.120874
| 0.120874
| 0.092585
| 0
| 0.024947
| 0.281618
| 3,906
| 114
| 80
| 34.263158
| 0.806486
| 0.132873
| 0
| 0.222222
| 0
| 0
| 0.07211
| 0.018772
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012346
| false
| 0
| 0.098765
| 0
| 0.111111
| 0.012346
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b821c976ee4f99cdb762f5c8e9e17a0905d593c
| 321
|
py
|
Python
|
Problem Solving/Algorithms/Introduction to Nim Game.py
|
Muntaha-Islam0019/HackerRank-Solutions
|
caa687aab67461aba69026d3bdc44b62c1dec1c9
|
[
"MIT"
] | null | null | null |
Problem Solving/Algorithms/Introduction to Nim Game.py
|
Muntaha-Islam0019/HackerRank-Solutions
|
caa687aab67461aba69026d3bdc44b62c1dec1c9
|
[
"MIT"
] | null | null | null |
Problem Solving/Algorithms/Introduction to Nim Game.py
|
Muntaha-Islam0019/HackerRank-Solutions
|
caa687aab67461aba69026d3bdc44b62c1dec1c9
|
[
"MIT"
] | null | null | null |
from functools import reduce


def nim_winner(piles):
    """Return 'Second' if the first player loses normal-play Nim, else 'First'.

    By the Sprague-Grundy theorem, the first player loses exactly when the
    XOR of all pile sizes is zero.
    """
    return 'Second' if reduce(lambda x, y: x ^ y, piles) == 0 else 'First'


def main():
    """Read the input: game count, then per game a pile-count line and a
    line of pile sizes; print each game's winner."""
    games = int(input())
    for _ in range(games):
        # The pile-count line is implied by the pile list length; consume it.
        int(input())
        piles = list(map(int, input().rstrip().split()))
        # NOTE: len(piles) % 2 would not work (fails on test case 2).
        print(nim_winner(piles))


# Guarding main() makes the module importable (and testable) without
# blocking on stdin; behavior when run as a script is unchanged.
if __name__ == '__main__':
    main()
| 21.4
| 55
| 0.616822
| 49
| 321
| 4
| 0.734694
| 0.122449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012048
| 0.224299
| 321
| 14
| 56
| 22.928571
| 0.7751
| 0.152648
| 0
| 0
| 0
| 0
| 0.040741
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0.142857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b838f908501c804e2f543284ab15cfe75ddca4d
| 380
|
py
|
Python
|
ipub/params.py
|
ms8r/scriv2epub
|
a8d4ee7abc5b5bef654e40fec25605b34ed43207
|
[
"MIT"
] | 8
|
2016-01-15T21:12:50.000Z
|
2021-10-17T18:47:40.000Z
|
ipub/params.py
|
ms8r/scriv2epub
|
a8d4ee7abc5b5bef654e40fec25605b34ed43207
|
[
"MIT"
] | null | null | null |
ipub/params.py
|
ms8r/scriv2epub
|
a8d4ee7abc5b5bef654e40fec25605b34ed43207
|
[
"MIT"
] | null | null | null |
import os.path

# Absolute directory of this module; all resource paths are anchored here.
_PATH_PREFIX = os.path.dirname(os.path.realpath(__file__))

# Jinja templates shipped alongside the package.
_TEMPLATE_PATH = os.path.join(_PATH_PREFIX, 'tmpl')
_TEMPLATE_EXT = '.jinja'
# Skeleton directory used when assembling an EPUB.
_EPUB_SKELETON_PATH = os.path.join(_PATH_PREFIX, 'epub')

# Paragraph style identifiers — presumably CSS class names used by the
# EPUB templates; verify against the template files.
_BASIC_CH_PAR_STYLE = 'par-indent'
_FIRST_CH_PAR_STYLE = 'texttop'
_DROP_CAP_STYLE = 'dropcap'
_CLEAR_STYLE = 'clearit'
_IN_PG_SEC_BREAK_STYLE = 'center-par-tb-space'
| 31.666667
| 59
| 0.776316
| 59
| 380
| 4.40678
| 0.559322
| 0.115385
| 0.076923
| 0.107692
| 0.184615
| 0.184615
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094737
| 380
| 11
| 60
| 34.545455
| 0.755814
| 0
| 0
| 0
| 0
| 0
| 0.168421
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b84b07bfeb0b1498473bf71e8cf00668429868a
| 1,138
|
py
|
Python
|
datastruct/TreeNode.py
|
cocobear/LeetCode-in-Python
|
b4ecd5cb7122467ee479f38497faaabb17e6025e
|
[
"MIT"
] | null | null | null |
datastruct/TreeNode.py
|
cocobear/LeetCode-in-Python
|
b4ecd5cb7122467ee479f38497faaabb17e6025e
|
[
"MIT"
] | null | null | null |
datastruct/TreeNode.py
|
cocobear/LeetCode-in-Python
|
b4ecd5cb7122467ee479f38497faaabb17e6025e
|
[
"MIT"
] | null | null | null |
class TreeNode:
    """Binary tree node with sideways pretty-printing and level-order
    construction (fixes: `!= None` comparisons replaced with `is not None`;
    Chinese comments translated)."""

    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None

    def __str__(self, depth=0):
        """Render the tree sideways: right subtree first (printed above),
        then this node indented by depth, then the left subtree below."""
        ret = ''
        if self.right is not None:
            ret += self.right.__str__(depth + 1)
        if not self.val:
            # Falsy values (None, 0, '') render as a bare newline —
            # original behavior, kept as-is.
            ret += '\n'
        else:
            ret += '\n' + ('  '*depth) + str(self.val)
        if self.left is not None:
            ret += self.left.__str__(depth + 1)
        return ret

    @staticmethod
    def constructTree(levelOrder):
        # Build a binary tree from a level-order array of a full binary
        # tree; child of index i sits at 2*i+1 / 2*i+2, None ends a branch.
        def conTree(root, levelOrder, i):
            if i >= len(levelOrder):
                return
            if not levelOrder[i]:
                return
            tmp = TreeNode(levelOrder[i])
            root = tmp
            root.left = conTree(root.left, levelOrder, 2*i+1)
            root.right = conTree(root.right, levelOrder, 2*i+2)
            return root

        root = None
        return conTree(root, levelOrder, 0)
if __name__ == "__main__":
    # Smoke test: print a three-level tree and a one-level tree.
    print(TreeNode.constructTree([3,9,20,None,None,15,7]))
    print(TreeNode.constructTree([2,3,4]))
| 27.756098
| 63
| 0.51406
| 129
| 1,138
| 4.348837
| 0.310078
| 0.078431
| 0.042781
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.024862
| 0.363796
| 1,138
| 40
| 64
| 28.45
| 0.75
| 0.027241
| 0
| 0.060606
| 0
| 0
| 0.014493
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.121212
| false
| 0
| 0
| 0
| 0.30303
| 0.060606
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b88125439c00c982850039874ca9e2e40963ded
| 17,936
|
py
|
Python
|
src/test_quality_measures.py
|
hackalog/dimension_reduction
|
18c54256f4b1f1fbfe0b99e86b6701e708b7c85c
|
[
"MIT"
] | 1
|
2018-10-22T11:45:45.000Z
|
2018-10-22T11:45:45.000Z
|
src/test_quality_measures.py
|
hackalog/dimension_reduction
|
18c54256f4b1f1fbfe0b99e86b6701e708b7c85c
|
[
"MIT"
] | null | null | null |
src/test_quality_measures.py
|
hackalog/dimension_reduction
|
18c54256f4b1f1fbfe0b99e86b6701e708b7c85c
|
[
"MIT"
] | null | null | null |
import hypothesis.strategies as st
from hypothesis.extra.numpy import arrays
from hypothesis import given
import unittest
import numpy as np
from sklearn.base import BaseEstimator
import inspect
import src.quality_measures as qm
from .logging import logger
# old functions to test against while refactoring
def old_centering_matrix(N):
    '''
    Returns the N x N centering matrix J = I - (1/N) * ones * ones^T.
    Kept as a reference implementation for regression tests.
    '''
    identity = np.identity(N)
    ones_col = np.matrix(np.ones(N)).transpose()
    return identity - ones_col * ones_col.transpose() / N
def old_doubly_center_matrix(matrix):
    '''
    Doubly center the (square) matrix: compute -J * matrix * J, halved.
    Raises AssertionError when the input is not square.
    '''
    rows, cols = matrix.shape
    assert rows == cols, "Matrix must be square"
    J = old_centering_matrix(rows)
    return (-J * matrix * J) / 2
def old_strain(high_distances, low_distances):
    '''Reference implementation of global strain between distance matrices.'''
    B = qm.doubly_center_matrix(qm.square_matrix_entries(high_distances))
    residual = qm.square_matrix_entries(B - qm.square_matrix_entries(low_distances))
    return np.sqrt(np.sum(residual) / np.sum(qm.square_matrix_entries(B)))
def old_point_strain(high_distances, low_distances):
    '''Reference implementation of per-point strain (row-wise contribution).'''
    B = qm.doubly_center_matrix(qm.square_matrix_entries(high_distances))
    residual = qm.square_matrix_entries(B - qm.square_matrix_entries(low_distances))
    return np.sum(residual, axis=1) / np.sum(qm.square_matrix_entries(B))
def knn_to_point_untrustworthiness(high_knn, low_knn, n_neighbors=None,
                                   high_rank=None):
    '''
    Given the n_neighbors nearest neighbors in high space and low space,
    together with the rank matrix, compute the value of
    "untrustworthiness" of a point (this is the factor that a point
    contributes negatively to trustworthiness).
    '''
    if n_neighbors is None or high_rank is None:
        raise ValueError("n_neighbors and high_rank are required")
    point_scores = []
    N = high_knn.shape[0]
    # Normalization constant G_K from the trustworthiness definition.
    G_K = qm._trustworthiness_normalizating_factor(n_neighbors, N)
    for i, low in enumerate(low_knn):
        # Low-space neighbors of point i that are NOT high-space neighbors
        # ("intruders").
        trust_indices = set(low).difference(set(high_knn[i]))
        score = 0
        for j in trust_indices:
            # Penalize by how far beyond the true kNN each intruder ranks
            # in high space, scaled by the normalization factor.
            score += (high_rank[i, j] - n_neighbors) * 2 / G_K
        point_scores.append(score)
    return np.array(point_scores)
def old_point_untrustworthiness(high_distances=None, low_distances=None,
                                high_data=None, low_data=None,
                                metric='euclidean', n_neighbors=None):
    '''
    Given high/low distances or data, compute the value of
    "untrustworthiness" of a point (this is the factor that a point
    contributes negatively to trustworthiness).
    '''
    # Accept either precomputed distance matrices or raw data points.
    hd, ld, _ = qm.pairwise_distance_differences(high_distances=high_distances,
                                                 low_distances=low_distances,
                                                 high_data=high_data,
                                                 low_data=low_data,
                                                 metric=metric)
    if n_neighbors is None:
        raise ValueError("n_neighbors is required")
    # Rank each point's distances, then keep the k nearest in each space.
    high_rank = qm.rank_matrix(hd)
    low_rank = qm.rank_matrix(ld)
    high_knn = qm.rank_to_knn(high_rank, n_neighbors=n_neighbors)
    low_knn = qm.rank_to_knn(low_rank, n_neighbors=n_neighbors)
    point_scores = knn_to_point_untrustworthiness(high_knn, low_knn,
                                                  n_neighbors=n_neighbors,
                                                  high_rank=high_rank)
    return point_scores
class test_estimator(BaseEstimator):
    """Trivial estimator that echoes back whatever it was fitted with."""

    def fit(self, X):
        # Remember the fit input so transform() can return it verbatim.
        self._return_value = X

    def transform(self, X):
        # X is ignored; returns the value captured by fit().
        return self._return_value
# Start of tests
# Fix: np.float was removed in NumPy 1.24; it was an alias of builtin float.
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)))
def test_square_matrix_entries(array):
    """Elementwise squaring of a matrix matches array**2."""
    matrix = np.matrix(array)
    s_array = array**2
    assert (qm.square_matrix_entries(matrix) == s_array).all()
@given(st.integers(min_value=1, max_value=100))
def test_old_new_centering_matrix(N):
    """New centering_matrix agrees with the legacy implementation."""
    expected = old_centering_matrix(N)
    actual = qm.centering_matrix(N)
    assert (actual == expected).all()
@given(st.integers(min_value=1, max_value=100))
def test_centering_matrix_output(N):
    """centering_matrix returns an N x N matrix."""
    result = qm.centering_matrix(N)
    assert result.shape == (N, N)
# Fix: np.float was removed in NumPy 1.24; it was an alias of builtin float.
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)))
def test_old_new_doubly_center_matrix(matrix):
    """New doubly_center_matrix agrees with the legacy implementation."""
    assert (qm.doubly_center_matrix(matrix) ==
            old_doubly_center_matrix(matrix)).all()
# Fix: np.float was removed in NumPy 1.24; it was an alias of builtin float.
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       arrays(float, (3, 2), elements=st.floats(min_value=-100,
                                                max_value=100)))
def test_pairwise_distance_differences_data(high_data, low_data):
    """Data inputs yield three n_pts x n_pts matrices."""
    hd, ld, dd = qm.pairwise_distance_differences(high_data=high_data,
                                                  low_data=low_data)
    n_pts = high_data.shape[0]
    assert hd.shape == (n_pts, n_pts)
    assert ld.shape == (n_pts, n_pts)
    assert dd.shape == (n_pts, n_pts)
# Fix: np.float was removed in NumPy 1.24; it was an alias of builtin float.
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)))
def test_pairwise_distance_differences_dist(high_dist, low_dist):
    """Precomputed distances pass through unchanged; dd is their difference."""
    hd, ld, dd = qm.pairwise_distance_differences(high_distances=high_dist,
                                                  low_distances=low_dist)
    n_pts = high_dist.shape[0]
    assert hd.shape == (n_pts, n_pts)
    assert ld.shape == (n_pts, n_pts)
    assert dd.shape == (n_pts, n_pts)
    assert (hd == high_dist).all()
    assert (ld == low_dist).all()
    assert (dd == (high_dist-low_dist)).all()
# Fix: np.float was removed in NumPy 1.24; it was an alias of builtin float.
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       arrays(float, (3, 2), elements=st.floats(min_value=-100,
                                                max_value=100)))
def test_stress_data(high_data, low_data):
    """stress() on raw data returns a float64 scalar."""
    stress = qm.stress(high_data=high_data, low_data=low_data)
    assert stress.dtype == 'float64'
# Fix: np.float was removed in NumPy 1.24; it was an alias of builtin float.
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)))
def test_stress_distances(high_distances, low_distances):
    """stress() on distance matrices returns a float64 scalar."""
    stress = qm.stress(high_distances=high_distances,
                       low_distances=low_distances)
    assert stress.dtype == 'float64'
# Fix: np.float was removed in NumPy 1.24; it was an alias of builtin float.
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       arrays(float, (3, 2), elements=st.floats(min_value=-100,
                                                max_value=100)))
def test_point_stress_data(high_data, low_data):
    """point_stress() on raw data returns one value per point."""
    pstress = qm.point_stress(high_data=high_data, low_data=low_data)
    n_pts = high_data.shape[0]
    assert pstress.shape == (n_pts, )
# Fix: np.float was removed in NumPy 1.24; it was an alias of builtin float.
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)))
def test_point_stress_distances(high_distances, low_distances):
    """point_stress() on distance matrices returns one value per point."""
    pstress = qm.point_stress(high_distances=high_distances,
                              low_distances=low_distances)
    n_pts = high_distances.shape[0]
    assert pstress.shape == (n_pts, )
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)))
def test_old_new_strain(high_distances, low_distances):
    """New strain implementation agrees with the reference implementation.

    (Fixed: ``np.float`` was removed in NumPy 1.24.)
    """
    # An all-zero high-distance matrix raises an error; tested later.
    if not (high_distances == 0).all():
        assert (qm.strain(high_distances, low_distances) ==
                old_strain(high_distances, low_distances)).all()
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)))
def test_old_new_point_strain(high_distances, low_distances):
    """New point_strain agrees with the reference implementation.

    (Fixed: ``np.float`` was removed in NumPy 1.24.)
    """
    # An all-zero high-distance matrix raises an error; tested later.
    if not (high_distances == 0).all():
        assert (qm.point_strain(high_distances, low_distances) ==
                old_point_strain(high_distances, low_distances)).all()
# TODO: Test various input styles.
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       st.integers(min_value=1, max_value=3))
def test_old_new_point_untrustworthiness(high_distances, low_distances,
                                         n_neighbors):
    """New point_untrustworthiness agrees with the reference implementation.

    (Fixed: ``np.float`` was removed in NumPy 1.24.)
    """
    old = old_point_untrustworthiness(high_distances=high_distances,
                                      low_distances=low_distances,
                                      n_neighbors=n_neighbors)
    new = qm.point_untrustworthiness(high_distances=high_distances,
                                     low_distances=low_distances,
                                     n_neighbors=n_neighbors)
    assert all(old == new)
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       st.integers(min_value=1, max_value=3))
def test_trustworthiness_distances(high_distances, low_distances,
                                   n_neighbors):
    """trustworthiness() on distance matrices: equals 1 minus the summed
    per-point untrustworthiness, and lies in [0, 1].

    (Fixed: ``np.float`` was removed in NumPy 1.24.)
    """
    new = qm.trustworthiness(high_distances=high_distances,
                             low_distances=low_distances,
                             n_neighbors=n_neighbors)
    old_point = old_point_untrustworthiness(high_distances=high_distances,
                                            low_distances=low_distances,
                                            n_neighbors=n_neighbors)
    assert new == (1-sum(old_point))
    assert new >= 0.0
    assert new <= 1.0
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       st.integers(min_value=1, max_value=3))
def test_trustworthiness_data(high_data, low_data, n_neighbors):
    """trustworthiness() on raw coordinate data: same contract as the
    distance-matrix variant, and lies in [0, 1].

    (Fixed: ``np.float`` was removed in NumPy 1.24.)
    """
    new = qm.trustworthiness(high_data=high_data,
                             low_data=low_data,
                             n_neighbors=n_neighbors)
    old_point = old_point_untrustworthiness(high_data=high_data,
                                            low_data=low_data,
                                            n_neighbors=n_neighbors)
    assert new == (1-sum(old_point))
    assert new >= 0.0
    assert new <= 1.0
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       st.integers(min_value=1, max_value=3))
def test_trustworthiness_point_scores(high_distances, low_distances,
                                      n_neighbors):
    """trustworthiness() accepts precomputed point_scores directly.

    (Fixed: ``np.float`` was removed in NumPy 1.24.)
    """
    old_point = old_point_untrustworthiness(high_distances=high_distances,
                                            low_distances=low_distances,
                                            n_neighbors=n_neighbors)
    new = qm.trustworthiness(point_scores=old_point)
    assert new == (1-sum(old_point))
    assert new >= 0.0
    assert new <= 1.0
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       st.integers(min_value=1, max_value=3))
def test_continuity_distances(high_distances, low_distances,
                              n_neighbors):
    """continuity() on distance matrices lies in [0, 1].

    (Fixed: ``np.float`` was removed in NumPy 1.24.)
    """
    new = qm.continuity(high_distances=high_distances,
                        low_distances=low_distances,
                        n_neighbors=n_neighbors)
    assert new >= 0.0
    assert new <= 1.0
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       st.integers(min_value=1, max_value=3))
def test_continuity_data(high_data, low_data, n_neighbors):
    """continuity() on raw coordinate data lies in [0, 1].

    (Fixed: ``np.float`` was removed in NumPy 1.24.)
    """
    new = qm.continuity(high_data=high_data,
                        low_data=low_data,
                        n_neighbors=n_neighbors)
    assert new >= 0.0
    assert new <= 1.0
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)),
       st.integers(min_value=1, max_value=3))
def test_continuity_point_scores(high_distances, low_distances,
                                 n_neighbors):
    """continuity() accepts precomputed point_discontinuity scores directly.

    (Fixed: ``np.float`` was removed in NumPy 1.24.)
    """
    point = qm.point_discontinuity(high_distances=high_distances,
                                   low_distances=low_distances,
                                   n_neighbors=n_neighbors)
    new = qm.continuity(point_scores=point)
    assert new == (1-sum(point))
    assert new >= 0.0
    assert new <= 1.0
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100),
              unique=True),
       arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100),
              unique=True),
       arrays(bool, (3, 1)),
       st.integers(min_value=1, max_value=3))
def test_scorers(hd, ld, target, n_neighbors):
    """Every registered quality measure has a matching scorer.

    Scorers follow the scikit-learn "greater is better" convention, so for
    "lower is better" measures the scorer is the negated measure; the
    measure/scorer ratio must therefore be +1 or -1.

    (Fixed: ``np.float``/``np.bool`` were removed in NumPy 1.24, and the
    "Untested measure" log message was missing its ``f`` prefix so ``{key}``
    was logged literally.)
    """
    key_l = qm.available_quality_measures().keys()
    high_low_l = ["continuity", "stress", "strain", "trustworthiness"]
    greater_is_better = ["continuity", "trustworthiness"]
    estimator = test_estimator()
    estimator.fit(ld)
    for key in key_l:
        # Expected measure/scorer sign for this key.
        val = 1.0 if key in greater_is_better else -1.0
        logger.debug(key)
        measure = qm.available_quality_measures()[key]
        scorer = qm.available_scorers()[key]
        if key in high_low_l:
            if 'n_neighbors' in inspect.getfullargspec(measure).args:
                m = measure(high_data=hd, low_data=ld, n_neighbors=n_neighbors)
                s = scorer(estimator, hd, n_neighbors=n_neighbors)
            else:
                m = measure(high_data=hd, low_data=ld)
                s = scorer(estimator, hd)
        elif key == '1nn-error':
            m = measure(data=ld, classes=target)
            s = scorer(estimator, hd, y=target)
        else:
            logger.debug(f"Untested measure:{key}. Add me to test_scorers")
            assert False
        logger.debug(f"measure:{m}, scorer:{s}")
        if m != 0 and s != 0:
            assert np.isclose(m/s, val)
        else:
            assert s == m
@given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                max_value=100)))
def test_rank_matrix_compatibility(matrix):
    """Fast rank_matrix agrees elementwise with slower_rank_matrix.

    (Fixed: ``np.float`` was removed in NumPy 1.24.)
    """
    assert (qm.slower_rank_matrix(matrix) == qm.rank_matrix(matrix)).all()
class TestEncoding(unittest.TestCase):
    """Input-validation tests: invalid shapes or missing arguments must raise
    ValueError.

    (Fixed: ``np.float`` was removed in NumPy 1.24; builtin ``float`` is the
    identical dtype.)
    """

    @given(arrays(float, (3, 2), elements=st.floats(min_value=-100,
                                                    max_value=100)))
    def test_doubly_center_matrix_input(self, matrix):
        """Non-square input is rejected."""
        with self.assertRaises(ValueError):
            qm.doubly_center_matrix(matrix)

    @given(arrays(float, (3, 3), elements=st.floats(min_value=-100,
                                                    max_value=100)))
    def test_pairwise_distance_differences_input(self, matrix):
        """Supplying only one side of the high/low pair is rejected."""
        with self.assertRaises(ValueError):
            qm.pairwise_distance_differences(high_data=matrix)
        with self.assertRaises(ValueError):
            qm.pairwise_distance_differences(high_distances=matrix)
        with self.assertRaises(ValueError):
            qm.pairwise_distance_differences(low_data=matrix)
        with self.assertRaises(ValueError):
            qm.pairwise_distance_differences(low_distances=matrix)

    def test_point_untrustworthiness_input(self):
        """Calling with no inputs at all is rejected."""
        with self.assertRaises(ValueError):
            qm.point_untrustworthiness()

    @given(st.integers(min_value=1, max_value=100))
    def test_zero_input_strain(self, N):
        """An all-zero high-distance matrix is rejected by strain()."""
        matrix = np.zeros((N, N))
        with self.assertRaises(ValueError):
            qm.strain(high_distances=matrix, low_distances=matrix)

    @given(st.integers(min_value=1, max_value=100))
    def test_zero_input_point_strain(self, N):
        """An all-zero high-distance matrix is rejected by point_strain()."""
        matrix = np.zeros((N, N))
        with self.assertRaises(ValueError):
            qm.point_strain(high_distances=matrix, low_distances=matrix)
# Script entry point.
# NOTE(review): unittest.main() only collects unittest.TestCase subclasses
# (TestEncoding here); the module-level hypothesis @given functions appear to
# rely on a pytest-style runner instead — confirm against the project's CI.
if __name__ == '__main__':
    unittest.main()
| 42.301887
| 79
| 0.575992
| 2,186
| 17,936
| 4.46935
| 0.087374
| 0.063869
| 0.046162
| 0.053019
| 0.729376
| 0.692631
| 0.653838
| 0.630604
| 0.595189
| 0.562334
| 0
| 0.031304
| 0.321421
| 17,936
| 423
| 80
| 42.401891
| 0.771424
| 0.03886
| 0
| 0.506061
| 0
| 0
| 0.015394
| 0
| 0
| 0
| 0
| 0.002364
| 0.148485
| 1
| 0.10303
| false
| 0
| 0.027273
| 0.00303
| 0.157576
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b884dfe98e3224d03e56b0ff9073cf479be11aa
| 797
|
py
|
Python
|
addons/odoo_elasticsearch/models/trend_search_mapping.py
|
marionumza/vocal_v12
|
480990e919c9410903e06e7813ee92800bd6a569
|
[
"Unlicense"
] | null | null | null |
addons/odoo_elasticsearch/models/trend_search_mapping.py
|
marionumza/vocal_v12
|
480990e919c9410903e06e7813ee92800bd6a569
|
[
"Unlicense"
] | null | null | null |
addons/odoo_elasticsearch/models/trend_search_mapping.py
|
marionumza/vocal_v12
|
480990e919c9410903e06e7813ee92800bd6a569
|
[
"Unlicense"
] | 1
|
2021-05-05T07:59:08.000Z
|
2021-05-05T07:59:08.000Z
|
import logging
_logger = logging.getLogger(__name__)
from odoo import api, fields, models
class TrendSearchMapping(models.Model):
    # One "trending search" keyword attached to an elasticsearch configuration.
    _name = 'trend.search.mapping'
    _order = "sequence"

    # Keyword text shown as a trending-search suggestion.
    name = fields.Char(string="Keywords", required=True, help="Name of product")
    # Parent configuration record this keyword belongs to.
    trend_search_mapping_id = fields.Many2one('elastic.search.config',string='Field Index Mapping')
    # Display/sort order (lower sorts first per _order).
    sequence = fields.Integer(default=5)

    @api.model
    def trend_search_map_set_default(self):
        """Seed two demo keywords ('imac', 'ipad') on the first elasticsearch
        config and enable its trending state.

        NOTE(review): search() may return an empty recordset; writing
        trending_state on it would then be a no-op and config_id.id would be
        False — confirm a config record is guaranteed to exist before this
        runs (e.g. via XML data loaded at install time).
        """
        config_id = self.env['elastic.search.config'].search([],limit=1)
        config_id.trending_state = 'enable'
        self.create({
            'name':'imac',
            'trend_search_mapping_id':config_id.id
        })
        self.create({
            'name':'ipad',
            'trend_search_mapping_id':config_id.id
        })
| 30.653846
| 99
| 0.667503
| 97
| 797
| 5.226804
| 0.494845
| 0.108481
| 0.142012
| 0.118343
| 0.118343
| 0.118343
| 0.118343
| 0
| 0
| 0
| 0
| 0.004732
| 0.204517
| 797
| 25
| 100
| 31.88
| 0.794953
| 0
| 0
| 0.285714
| 0
| 0
| 0.225847
| 0.110414
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0.095238
| 0
| 0.428571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b897b3e2e2a162da4ba6ef2e1e00007c3d0d7d3
| 25,363
|
py
|
Python
|
src/python/twitter/common/app/application.py
|
wfarner/commons
|
42988a7a49f012665174538cca53604c7846ee86
|
[
"Apache-2.0"
] | 1
|
2019-12-20T14:13:27.000Z
|
2019-12-20T14:13:27.000Z
|
src/python/twitter/common/app/application.py
|
wfarner/commons
|
42988a7a49f012665174538cca53604c7846ee86
|
[
"Apache-2.0"
] | null | null | null |
src/python/twitter/common/app/application.py
|
wfarner/commons
|
42988a7a49f012665174538cca53604c7846ee86
|
[
"Apache-2.0"
] | 1
|
2019-12-20T14:13:29.000Z
|
2019-12-20T14:13:29.000Z
|
# ==================================================================================================
# Copyright 2011 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================
from __future__ import print_function
__author__ = 'Dave Buchfuhrer, Brian Wickman'
try:
import ConfigParser
except ImportError:
import configparser as ConfigParser
import atexit
import copy
import inspect
import optparse
import os
import shlex
import sys
import threading
from collections import defaultdict, deque
from functools import partial
from twitter.common import options
from twitter.common.app.module import AppModule
from twitter.common.app.inspection import Inspection
from twitter.common.lang import Compatibility
from twitter.common.process import daemonize
from twitter.common.util import topological_sort
class Application(object):
class Error(Exception): pass
# enforce a quasi-singleton interface (for resettable applications in test)
_Global = None
@staticmethod
def reset():
"""Reset the global application. Only useful for testing."""
Application._Global = Application()
@staticmethod
def active():
"""Return the current resident application object."""
return Application._Global
HELP_OPTIONS = [
options.Option("-h", "--help", "--short-help",
action="callback",
callback=lambda *args, **kwargs: Application.active()._short_help(*args, **kwargs),
help="show this help message and exit."),
options.Option("--long-help",
action="callback",
callback=lambda *args, **kwargs: Application.active()._long_help(*args, **kwargs),
help="show options from all registered modules, not just the __main__ module.")
]
IGNORE_RC_FLAG = '--app_ignore_rc_file'
APP_OPTIONS = {
'daemonize':
options.Option('--app_daemonize',
action='store_true',
default=False,
dest='twitter_common_app_daemonize',
help="Daemonize this application."),
'daemon_stdout':
options.Option('--app_daemon_stdout',
default='/dev/null',
dest='twitter_common_app_daemon_stdout',
help="Direct this app's stdout to this file if daemonized."),
'daemon_stderr':
options.Option('--app_daemon_stderr',
default='/dev/null',
dest='twitter_common_app_daemon_stderr',
help="Direct this app's stderr to this file if daemonized."),
'pidfile':
options.Option('--app_pidfile',
default=None,
dest='twitter_common_app_pidfile',
help="The pidfile to use if --app_daemonize is specified."),
'debug':
options.Option('--app_debug',
action='store_true',
default=False,
dest='twitter_common_app_debug',
help="Print extra debugging information during application initialization."),
'profiling':
options.Option('--app_profiling',
action='store_true',
default=False,
dest='twitter_common_app_profiling',
help="Run profiler on the code while it runs. Note this can cause slowdowns."),
'profile_output':
options.Option('--app_profile_output',
default=None,
metavar='FILENAME',
dest='twitter_common_app_profile_output',
help="Dump the profiling output to a binary profiling format."),
'rc_filename':
options.Option('--app_rc_filename',
action='store_true',
default=False,
dest='twitter_common_app_rc_filename',
help="Print the filename for the rc file and quit."),
'ignore_rc_file':
options.Option(IGNORE_RC_FLAG,
action='store_true',
default=False,
dest='twitter_common_app_ignore_rc_file',
help="Ignore default arguments from the rc file."),
}
NO_COMMAND = 'DEFAULT'
OPTIONS = 'options'
OPTIONS_ATTR = '__options__'
def __init__(self):
self._name = None
self._registered_modules = []
self._init_modules = []
self._option_targets = defaultdict(dict)
self._global_options = {}
self._interspersed_args = False
self._main_options = Application.HELP_OPTIONS[:]
self._usage = ""
self._profiler = None
self._commands = {}
self._reset()
for opt in Application.APP_OPTIONS.values():
self.add_option(opt)
self._configure_options(None, Application.APP_OPTIONS)
def _raise_if_initialized(self, msg="Cannot perform operation after initialization!"):
if self.initialized:
raise Application.Error(msg)
def _raise_if_uninitialized(self, msg="Cannot perform operation before initialization!"):
if not self.initialized:
raise Application.Error(msg)
def _reset(self):
"""
Resets the state set up by init() so that init() may be called again.
"""
self.initialized = False
self._option_values = options.Values()
self._argv = []
def interspersed_args(self, value):
self._interspersed_args = bool(value)
def _configure_options(self, module, option_dict):
for opt_name, opt in option_dict.items():
self._option_targets[module][opt_name] = opt.dest
def configure(self, module=None, **kw):
"""
Configure the application object or its activated modules.
Typically application modules export flags that can be defined on the
command-line. In order to allow the application to override defaults,
these modules may export named parameters to be overridden. For example,
the Application object itself exports named variables such as "debug" or
"profiling", which can be enabled via:
app.configure(debug=True)
and
app.configure(profiling=True)
respectively. They can also be enabled with their command-line argument
counterpart, e.g.
./my_application --app_debug --app_profiling
Some modules export named options, e.g. twitter.common.app.modules.http exports
'enable', 'host', 'port'. The command-line arguments still take precedence and
will override any defaults set by the application in app.configure. To activate
these options, just pass along the module name:
app.configure(module='twitter.common.app.modules.http', enable=True)
"""
if module not in self._option_targets:
if not self._import_module(module):
raise Application.Error('Unknown module to configure: %s' % module)
def configure_option(name, value):
if name not in self._option_targets[module]:
raise Application.Error('Module %s has no option %s' % (module, name))
self.set_option(self._option_targets[module][name], value)
for option_name, option_value in kw.items():
configure_option(option_name, option_value)
def _main_parser(self):
return (options.parser()
.interspersed_arguments(self._interspersed_args)
.options(self._main_options)
.usage(self._usage))
def command_parser(self, command):
assert command in self._commands
values_copy = copy.deepcopy(self._option_values)
parser = self._main_parser()
command_group = options.new_group(('For %s only' % command) if command else 'Default')
for option in getattr(self._commands[command], Application.OPTIONS_ATTR):
op = copy.deepcopy(option)
if not hasattr(values_copy, op.dest):
setattr(values_copy, op.dest, op.default if op.default != optparse.NO_DEFAULT else None)
Application.rewrite_help(op)
op.default = optparse.NO_DEFAULT
command_group.add_option(op)
parser = parser.groups([command_group]).values(values_copy)
usage = self._commands[command].__doc__
if usage:
parser = parser.usage(usage)
return parser
def _construct_partial_parser(self):
"""
Construct an options parser containing only options added by __main__
or global help options registered by the application.
"""
if hasattr(self._commands.get(self._command), Application.OPTIONS_ATTR):
return self.command_parser(self._command)
else:
return self._main_parser().values(copy.deepcopy(self._option_values))
def _construct_full_parser(self):
"""
Construct an options parser containing both local and global (module-level) options.
"""
return self._construct_partial_parser().groups(self._global_options.values())
def _rc_filename(self):
rc_short_filename = '~/.%src' % self.name()
return os.path.expanduser(rc_short_filename)
def _add_default_options(self, argv):
"""
Return an argument list with options from the rc file prepended.
"""
rc_filename = self._rc_filename()
options = argv
if Application.IGNORE_RC_FLAG not in argv and os.path.exists(rc_filename):
command = self._command or Application.NO_COMMAND
rc_config = ConfigParser.SafeConfigParser()
rc_config.read(rc_filename)
if rc_config.has_option(command, Application.OPTIONS):
default_options_str = rc_config.get(command, Application.OPTIONS)
default_options = shlex.split(default_options_str, True)
options = default_options + options
return options
def _parse_options(self, force_args=None):
"""
Parse options and set self.option_values and self.argv to the values to be passed into
the application's main() method.
"""
argv = sys.argv[1:] if force_args is None else force_args
if argv and argv[0] in self._commands:
self._command = argv.pop(0)
else:
self._command = None
parser = self._construct_full_parser()
self._option_values, self._argv = parser.parse(self._add_default_options(argv))
def _short_help(self, option, opt, value, parser):
self._construct_partial_parser().print_help()
sys.exit(1)
def _long_help(self, option, opt, value, parser):
self._construct_full_parser().print_help()
sys.exit(1)
def _setup_modules(self):
"""
Setup all initialized modules.
"""
module_registry = AppModule.module_registry()
for bundle in topological_sort(AppModule.module_dependencies()):
for module_label in bundle:
assert module_label in module_registry
module = module_registry[module_label]
self._debug_log('Initializing: %s (%s)' % (module.label(), module.description()))
try:
module.setup_function()
except AppModule.Unimplemented:
pass
self._init_modules.append(module.label())
def _teardown_modules(self):
"""
Teardown initialized module in reverse initialization order.
"""
module_registry = AppModule.module_registry()
for module_label in reversed(self._init_modules):
assert module_label in module_registry
module = module_registry[module_label]
self._debug_log('Running exit function for %s (%s)' % (module_label, module.description()))
try:
module.teardown_function()
except AppModule.Unimplemented:
pass
def _maybe_daemonize(self):
if self._option_values.twitter_common_app_daemonize:
daemonize(pidfile=self._option_values.twitter_common_app_pidfile,
stdout=self._option_values.twitter_common_app_daemon_stdout,
stderr=self._option_values.twitter_common_app_daemon_stderr)
# ------- public exported methods -------
def init(self, force_args=None):
"""
Initialize the state necessary to run the application's main() function but
without actually invoking main. Mostly useful for testing. If force_args
specified, use those arguments instead of sys.argv[1:].
"""
self._raise_if_initialized("init cannot be called twice. Use reinit if necessary.")
self._parse_options(force_args)
self._maybe_daemonize()
self._setup_modules()
self.initialized = True
def reinit(self, force_args=None):
"""
Reinitialize the application. This clears the stateful parts of the application
framework and reruns init(). Mostly useful for testing.
"""
self._reset()
self.init(force_args)
def argv(self):
self._raise_if_uninitialized("Must call app.init() before you may access argv.")
return self._argv
def add_module_path(self, name, path):
"""
Add all app.Modules defined by name at path.
Typical usage (e.g. from the __init__.py of something containing many
app modules):
app.add_module_path(__name__, __path__)
"""
import pkgutil
for _, mod, ispkg in pkgutil.iter_modules(path):
if ispkg: continue
fq_module = '.'.join([name, mod])
__import__(fq_module)
for (kls_name, kls) in inspect.getmembers(sys.modules[fq_module], inspect.isclass):
if issubclass(kls, AppModule):
self.register_module(kls())
def register_module(self, module):
"""
Register an app.Module and all its options.
"""
if not isinstance(module, AppModule):
raise TypeError('register_module should be called with a subclass of AppModule')
if module.label() in self._registered_modules:
# Do not reregister.
return
if hasattr(module, 'OPTIONS'):
if not isinstance(module.OPTIONS, dict):
raise Application.Error('Registered app.Module %s has invalid OPTIONS.' % module.__module__)
for opt in module.OPTIONS.values():
self._add_option(module.__module__, opt)
self._configure_options(module.label(), module.OPTIONS)
self._registered_modules.append(module.label())
@staticmethod
def _get_module_key(module):
return 'From module %s' % module
def _add_main_option(self, option):
self._main_options.append(option)
def _add_module_option(self, module, option):
calling_module = Application._get_module_key(module)
if calling_module not in self._global_options:
self._global_options[calling_module] = options.new_group(calling_module)
self._global_options[calling_module].add_option(option)
@staticmethod
def rewrite_help(op):
if hasattr(op, 'help') and isinstance(op.help, Compatibility.string):
if op.help.find('%default') != -1 and op.default != optparse.NO_DEFAULT:
op.help = op.help.replace('%default', str(op.default))
else:
op.help = op.help + ((' [default: %s]' % str(op.default))
if op.default != optparse.NO_DEFAULT else '')
def _add_option(self, calling_module, option):
op = copy.deepcopy(option)
if op.dest and hasattr(op, 'default'):
self.set_option(op.dest, op.default if op.default != optparse.NO_DEFAULT else None,
force=False)
Application.rewrite_help(op)
op.default = optparse.NO_DEFAULT
if calling_module == '__main__':
self._add_main_option(op)
else:
self._add_module_option(calling_module, op)
def _get_option_from_args(self, args, kwargs):
if len(args) == 1 and kwargs == {} and isinstance(args[0], options.Option):
return args[0]
else:
return options.TwitterOption(*args, **kwargs)
def add_option(self, *args, **kwargs):
"""
Add an option to the application.
You may pass either an Option object from the optparse/options module, or
pass the *args/**kwargs necessary to construct an Option.
"""
self._raise_if_initialized("Cannot call add_option() after main()!")
calling_module = Inspection.find_calling_module()
added_option = self._get_option_from_args(args, kwargs)
self._add_option(calling_module, added_option)
def command(self, function=None, name=None):
"""
Decorator to turn a function into an application command.
To add a command foo, the following patterns will both work:
@app.command
def foo(args, options):
...
@app.command(name='foo')
def bar(args, options):
...
"""
if name is None:
return self._register_command(function)
else:
return partial(self._register_command, command_name=name)
def _register_command(self, function, command_name=None):
"""
Registers function as the handler for command_name. Uses function.__name__ if command_name
is None.
"""
if Inspection.find_calling_module() == '__main__':
if command_name is None:
command_name = function.__name__
if command_name in self._commands:
raise Application.Error('Found two definitions for command %s' % command_name)
self._commands[command_name] = function
return function
def default_command(self, function):
"""
Decorator to make a command default.
"""
if Inspection.find_calling_module() == '__main__':
if None in self._commands:
defaults = (self._commands[None].__name__, function.__name__)
raise Application.Error('Found two default commands: %s and %s' % defaults)
self._commands[None] = function
return function
def command_option(self, *args, **kwargs):
"""
Decorator to add an option only for a specific command.
"""
def register_option(function):
added_option = self._get_option_from_args(args, kwargs)
if not hasattr(function, Application.OPTIONS_ATTR):
setattr(function, Application.OPTIONS_ATTR, deque())
getattr(function, Application.OPTIONS_ATTR).appendleft(added_option)
return function
return register_option
def copy_command_options(self, command_function):
"""
Decorator to copy command options from another command.
"""
def register_options(function):
if hasattr(command_function, Application.OPTIONS_ATTR):
if not hasattr(function, Application.OPTIONS_ATTR):
setattr(function, Application.OPTIONS_ATTR, deque())
command_options = getattr(command_function, Application.OPTIONS_ATTR)
getattr(function, Application.OPTIONS_ATTR).extendleft(command_options)
return function
return register_options
def add_command_options(self, command_function):
"""
Function to add all options from a command
"""
module = inspect.getmodule(command_function).__name__
for option in getattr(command_function, Application.OPTIONS_ATTR, []):
self._add_option(module, option)
def _debug_log(self, msg):
if hasattr(self._option_values, 'twitter_common_app_debug') and (
self._option_values.twitter_common_app_debug):
print('twitter.common.app debug: %s' % msg, file=sys.stderr)
def set_option(self, dest, value, force=True):
"""
Set a global option value either pre- or post-initialization.
If force=False, do not set the default if already overridden by a manual call to
set_option.
"""
if hasattr(self._option_values, dest) and not force:
return
setattr(self._option_values, dest, value)
def get_options(self):
"""
Return all application options, both registered by __main__ and all imported modules.
"""
return self._option_values
def get_commands(self):
"""
Return all valid commands registered by __main__
"""
return filter(None, self._commands.keys())
def get_commands_and_docstrings(self):
"""
Generate all valid commands together with their docstrings
"""
for command, function in self._commands.items():
if command is not None:
yield command, function.__doc__
def get_local_options(self):
"""
Return the options only defined by __main__.
"""
new_values = options.Values()
for opt in self._main_options:
if opt.dest:
setattr(new_values, opt.dest, getattr(self._option_values, opt.dest))
return new_values
def set_usage(self, usage):
"""
Set the usage message should the user call --help or invalidly specify options.
"""
self._usage = usage
def error(self, message):
"""
Print the application help message, an error message, then exit.
"""
self._construct_partial_parser().error(message)
def help(self):
"""
Print the application help message and exit.
"""
self._short_help(*(None,)*4)
def set_name(self, application_name):
"""
Set the application name. (Autodetect otherwise.)
"""
self._raise_if_initialized("Cannot set application name.")
self._name = application_name
def name(self):
"""
Return the name of the application. If set_name was never explicitly called,
the application framework will attempt to autodetect the name of the application
based upon the location of __main__.
"""
if self._name is not None:
return self._name
else:
try:
return Inspection.find_application_name()
except:
return 'unknown'
def quit(self, rc, exit_function=sys.exit):
self._debug_log('Shutting application down.')
self._teardown_modules()
self._debug_log('Finishing up module teardown.')
nondaemons = 0
self.dump_profile()
for thr in threading.enumerate():
self._debug_log(' Active thread%s: %s' % (' (daemon)' if thr.isDaemon() else '', thr))
if thr is not threading.current_thread() and not thr.isDaemon():
nondaemons += 1
if nondaemons:
self._debug_log('More than one active non-daemon thread, your application may hang!')
else:
self._debug_log('Exiting cleanly.')
exit_function(rc)
def profiler(self):
if self._option_values.twitter_common_app_profiling:
if self._profiler is None:
try:
import cProfile as profile
except ImportError:
import profile
self._profiler = profile.Profile()
return self._profiler
else:
return None
def dump_profile(self):
if self._option_values.twitter_common_app_profiling:
if self._option_values.twitter_common_app_profile_output:
self.profiler().dump_stats(self._option_values.twitter_common_app_profile_output)
else:
self.profiler().print_stats(sort='time')
def _run_main(self, main_method, *args, **kwargs):
try:
if self.profiler():
rc = self.profiler().runcall(main_method, *args, **kwargs)
else:
rc = main_method(*args, **kwargs)
except SystemExit as e:
rc = e.code
self._debug_log('main_method exited with return code = %s' % repr(rc))
except KeyboardInterrupt as e:
rc = None
self._debug_log('main_method exited with ^C')
return rc
def _import_module(self, name):
"""
Import the module, return True on success, False if the import failed.
"""
try:
__import__(name)
return True
except ImportError:
return False
def main(self):
    """
    If called from the __main__ module, run the script's main() method with
    arguments passed and global options parsed.

    The following patterns are acceptable for the main method:
      main()
      main(args)
      main(args, options)
    """
    main_module = Inspection.find_calling_module()
    if main_module != '__main__':
        # only support if __name__ == '__main__'
        return
    # Pull in modules in twitter.common.app.modules
    if not self._import_module('twitter.common.app.modules'):
        print('Unable to import twitter app modules!', file=sys.stderr)
        sys.exit(1)
    # defer init as long as possible.
    self.init()
    if self._option_values.twitter_common_app_rc_filename:
        print('RC filename: %s' % self._rc_filename())
        return
    try:
        caller_main = Inspection.find_main_from_caller()
    except Inspection.InternalError:
        caller_main = None
    if None in self._commands:
        assert caller_main is None, "Error: Cannot define both main and a default command."
    else:
        self._commands[None] = caller_main
    main_method = self._commands[self._command]
    if main_method is None:
        commands = sorted(self.get_commands())
        if commands:
            print('Must supply one of the following commands:', ', '.join(commands), file=sys.stderr)
        else:
            print('No main() or command defined! Application must define one of these.', file=sys.stderr)
        sys.exit(1)
    try:
        # inspect.getargspec was removed in Python 3.11; prefer the drop-in
        # getfullargspec when it exists (its .args field is compatible with
        # the usage below). The `or` short-circuit avoids touching the
        # removed attribute on modern interpreters.
        getargspec = getattr(inspect, 'getfullargspec', None) or inspect.getargspec
        argspec = getargspec(main_method)
    except TypeError as e:
        print('Malformed main(): %s' % e, file=sys.stderr)
        sys.exit(1)
    # Dispatch on main()'s arity: (), (args,), or (args, options).
    if len(argspec.args) == 1:
        args = [self._argv]
    elif len(argspec.args) == 2:
        args = [self._argv, self._option_values]
    else:
        if len(self._argv) != 0:
            print('main() takes no arguments but got leftover arguments: %s!' %
                  ' '.join(self._argv), file=sys.stderr)
            sys.exit(1)
        args = []
    rc = self._run_main(main_method, *args)
    self.quit(rc)
| 34.791495
| 101
| 0.674644
| 3,177
| 25,363
| 5.15266
| 0.147624
| 0.024618
| 0.02639
| 0.015455
| 0.232926
| 0.140257
| 0.122908
| 0.089798
| 0.067563
| 0.032254
| 0
| 0.001409
| 0.216654
| 25,363
| 728
| 102
| 34.839286
| 0.822529
| 0.200725
| 0
| 0.195745
| 0
| 0
| 0.130551
| 0.016242
| 0
| 0
| 0
| 0
| 0.008511
| 1
| 0.125532
| false
| 0.006383
| 0.065957
| 0.004255
| 0.285106
| 0.023404
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b89b21d0ec82cd3eb4f531c62145aac5544814d
| 3,838
|
py
|
Python
|
nas/gui/end_registration_window.py
|
RolandZitny/BC-NAS
|
df2b1c643e5dce3b48c72026b4f83f895f33b822
|
[
"MIT"
] | null | null | null |
nas/gui/end_registration_window.py
|
RolandZitny/BC-NAS
|
df2b1c643e5dce3b48c72026b4f83f895f33b822
|
[
"MIT"
] | null | null | null |
nas/gui/end_registration_window.py
|
RolandZitny/BC-NAS
|
df2b1c643e5dce3b48c72026b4f83f895f33b822
|
[
"MIT"
] | null | null | null |
import base64
import os
import matplotlib.pyplot as plt
import cv2
import numpy as np
from PyQt5 import uic
from PyQt5 import QtWidgets
from PyQt5 import QtMultimedia
from PyQt5 import QtMultimediaWidgets
from PyQt5.QtGui import QImage, QPixmap
from PyQt5.QtWidgets import QDesktopWidget
from nas.gui.login_stimulation_window import LoginStimulationPresentation
import config
directory_path = os.path.dirname(os.path.abspath(__file__))
ui_path = os.path.join(directory_path, "designs" + os.sep + "end_registration_window.ui")
Ui_RegWindow, QtBaseClass = uic.loadUiType(ui_path) # Load .ui file.
class EndRegistrationWindow(QtWidgets.QMainWindow, Ui_RegWindow):
    """
    Class to display the final window that confirms the registration.
    The user can perform a test login from this window.

    :param reg_user: The object of the registered user.
    :type reg_user: ``user``
    """

    def __init__(self, reg_user):
        QtWidgets.QMainWindow.__init__(self)
        Ui_RegWindow.__init__(self)
        self.setupUi(self)
        self.reg_user = reg_user
        self.login_window = None  # created lazily by testing_log_in()
        self.set_up_window()
        self.get_reaction_plot()
        self.set_end_figure()

    def set_up_window(self):
        """
        Makes other window settings, such as connecting buttons, etc.
        """
        # Center window to screen. (The original repeated the moveCenter/move
        # pair twice; the operation is idempotent, so once is sufficient.)
        qt_rectangle = self.frameGeometry()
        center_point = QDesktopWidget().availableGeometry().center()
        qt_rectangle.moveCenter(center_point)
        self.move(qt_rectangle.topLeft())
        # Append the registered user's details to the static label texts.
        self.end_name.setText(self.end_name.text() + " " + self.reg_user.get_name())
        self.end_surname.setText(self.end_surname.text() + " " + self.reg_user.get_surname())
        self.end_loginId.setText(self.end_loginId.text() + " " + self.reg_user.get_id())
        self.TestLogin.clicked.connect(self.testing_log_in)

    def get_reaction_plot(self):
        """
        Creates a graph of responses to self-face and non-self-face stimuli.
        This graph is stored at `TMP_END_FIGURE`.
        """
        reactions, react_types = self.reg_user.get_reg_data()
        self_face_reaction = None
        non_self_face_reaction = None
        # NOTE(review): assumes at least one reaction of type 1 exists and is
        # immediately followed by a non-self-face reaction -- confirm against
        # the contract of reg_user.get_reg_data().
        for i in range(len(react_types)):
            if react_types[i] == 1:
                self_face_reaction = reactions[i]
                non_self_face_reaction = reactions[i + 1]
                break
        fig, axs = plt.subplots(2)
        fig.suptitle('Self-face & Non-self-face reakcia')
        axs[0].plot(self_face_reaction[0])
        axs[0].set_title('Self-face')
        axs[1].plot(non_self_face_reaction[0])
        axs[1].set_title('Non-self-face')
        plt.setp(axs[0], ylabel='mV')
        plt.setp(axs[1], ylabel='mV')
        fig.tight_layout()
        plt.savefig(config.TMP_END_FIGURE)
        plt.clf()

    def set_end_figure(self):
        """
        Draw a graph of the reaction in the window.
        """
        pixmap = QPixmap(config.TMP_END_FIGURE)
        self.ReactionLabel.setPixmap(QPixmap(pixmap.scaledToHeight(500)))
        self.clean_tmp()

    @staticmethod
    def clean_tmp():
        """
        Cleans up the temporary files folder.
        """
        for tmp_file in (config.TMP_END_FIGURE, config.TMP_PHOTO, config.TMP_PROC_PHOTO):
            if os.path.exists(tmp_file):
                os.remove(tmp_file)

    def testing_log_in(self):
        """
        Performs a test login.
        """
        self.login_window = LoginStimulationPresentation(self.reg_user)
        self.login_window.showMaximized()
        self.hide()
| 32.252101
| 94
| 0.647473
| 490
| 3,838
| 4.838776
| 0.32449
| 0.040489
| 0.032476
| 0.023619
| 0.176719
| 0.078026
| 0.048925
| 0.048925
| 0.048925
| 0.048925
| 0
| 0.008006
| 0.251433
| 3,838
| 118
| 95
| 32.525424
| 0.817264
| 0.133924
| 0
| 0.054795
| 0
| 0
| 0.03129
| 0.008301
| 0
| 0
| 0
| 0
| 0
| 1
| 0.082192
| false
| 0
| 0.178082
| 0
| 0.273973
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b8af184786b7b838926fd6c07d47b9fd8a6c329
| 445
|
py
|
Python
|
testing/matplotlib_test.py
|
deranderemark/CigarTracer
|
3f1172683c57dc7f28dd7517132014b23adfff90
|
[
"Apache-2.0"
] | null | null | null |
testing/matplotlib_test.py
|
deranderemark/CigarTracer
|
3f1172683c57dc7f28dd7517132014b23adfff90
|
[
"Apache-2.0"
] | 1
|
2022-02-06T15:50:07.000Z
|
2022-02-06T15:50:07.000Z
|
testing/matplotlib_test.py
|
deranderemark/CigarTracer
|
3f1172683c57dc7f28dd7517132014b23adfff90
|
[
"Apache-2.0"
] | null | null | null |
import matplotlib.pyplot as plt

# Create the figure and a single axes object to host the table.
fig, ax = plt.subplots()

# Row values for the table: [SD, ID, Score].
table_data = [
    ["1", 30, 34],
    ["2", 20, 223],
    ["3", 33, 2354],
    ["4", 25, 234],
    ["5", 12, 929],
]

# Build the table, centered on the axes, with column headers.
table = ax.table(cellText=table_data, loc='center', colLabels=['SD', 'ID', 'Score'])

# Tweak appearance and hide the axes frame so only the table shows.
table.set_fontsize(14)
table.scale(1, 4)
ax.axis('off')

# Display the table.
plt.show()
| 17.8
| 84
| 0.633708
| 65
| 445
| 4.292308
| 0.753846
| 0.114695
| 0.150538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092896
| 0.177528
| 445
| 24
| 85
| 18.541667
| 0.669399
| 0.238202
| 0
| 0
| 0
| 0
| 0.069069
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b8bb3b48e86a641ba4d24045654d0c3bccfafdb
| 5,242
|
py
|
Python
|
venv/Lib/site-packages/func_timeout/StoppableThread.py
|
lijj0812/UIAutoDemo
|
3e13380adeb6cf92410676ff7c125dbee598427f
|
[
"Apache-2.0"
] | 1
|
2021-01-12T14:39:01.000Z
|
2021-01-12T14:39:01.000Z
|
venv/Lib/site-packages/func_timeout/StoppableThread.py
|
lijj0812/UIAutoDemo
|
3e13380adeb6cf92410676ff7c125dbee598427f
|
[
"Apache-2.0"
] | 2
|
2021-06-16T19:56:35.000Z
|
2021-06-16T19:57:49.000Z
|
venv/Lib/site-packages/func_timeout/StoppableThread.py
|
lijj0812/UIAutoDemo
|
3e13380adeb6cf92410676ff7c125dbee598427f
|
[
"Apache-2.0"
] | 1
|
2020-09-17T07:56:53.000Z
|
2020-09-17T07:56:53.000Z
|
'''
Copyright (c) 2016, 2017, 2019 Timothy Savannah All Rights Reserved.
Licensed under the Lesser GNU Public License Version 3, LGPLv3. You should have recieved a copy of this with the source distribution as
LICENSE, otherwise it is available at https://github.com/kata198/func_timeout/LICENSE
'''
import os
import ctypes
import threading
__all__ = ('StoppableThread', 'JoinThread')
class StoppableThread(threading.Thread):
    '''
    StoppableThread - A thread that can be stopped by forcing an exception
    in the execution context.

    This works both to interrupt code that is in C or in python code, at
    either the next call to a python function, or the next line in python
    code.

    It is recommended that if you call stop ( @see StoppableThread.stop )
    that you use an exception that inherits BaseException, to ensure it
    likely isn't caught.

    Also, beware unmarked exception handlers in your code. Code like this:

        while True:
            try:
                doSomething()
            except:
                continue

    will never be able to abort, because the exception you raise is
    immediately caught.

    The exception is raised over and over, with a specifed delay
    (default 2.0 seconds)
    '''

    def _stopThread(self, exception, raiseEvery=2.0):
        '''
        _stopThread - @see StoppableThread.stop

        Returns True immediately when the thread is already dead; otherwise
        spawns a JoinThread that repeatedly raises `exception` in this
        thread's context every `raiseEvery` seconds (and returns None).
        '''
        if self.is_alive() is False:
            return True
        # Redirect to the null device to suppress traceback noise while the
        # thread is being killed; JoinThread.run() closes this handle.
        self._stderr = open(os.devnull, 'w')
        # Create "joining" thread which will raise the provided exception
        # on a repeat, until the thread stops.
        joinThread = JoinThread(self, exception, repeatEvery=raiseEvery)
        # Try to prevent spurrious prints
        joinThread._stderr = self._stderr
        joinThread.start()
        # NOTE(review): this assignment repeats the one before start();
        # presumably re-applied after the thread launches, but it looks
        # redundant -- confirm before removing.
        joinThread._stderr = self._stderr

    def stop(self, exception, raiseEvery=2.0):
        '''
        Stops the thread by raising a given exception.

        @param exception <Exception type> - Exception to throw. Likely, you
          want to use something that inherits from BaseException (so except
          Exception as e: continue; isn't a problem). This should be a
          class/type, NOT an instance, i.e. MyExceptionType not
          MyExceptionType()

        @param raiseEvery <float> Default 2.0 - We will keep raising this
          exception every #raiseEvery seconds, until the thread terminates.
          If your code traps a specific exception type, this will allow you
          #raiseEvery seconds to cleanup before exit. If you're calling
          third-party code you can't control, which catches BaseException,
          set this to a low number to break out of their exception handler.

        @return <None>
        '''
        return self._stopThread(exception, raiseEvery)
class JoinThread(threading.Thread):
    '''
    JoinThread - The workhouse that stops the StoppableThread.

    Takes an exception, and upon being started immediately raises that
    exception in the current context of the thread's execution (so next
    line of python gets it, or next call to a python api function in C
    code ).

    @see StoppableThread for more details
    '''

    def __init__(self, otherThread, exception, repeatEvery=2.0):
        '''
        __init__ - Create a JoinThread (don't forget to call .start() ! )

        @param otherThread <threading.Thread> - A thread

        @param exception <BaseException> - An exception. Should be a
          BaseException, to prevent "catch Exception as e: continue" type
          code from never being terminated. If such code is unavoidable,
          you can try setting #repeatEvery to a very low number, like
          .00001, and it will hopefully raise within the context of the
          catch, and be able to break free.

        @param repeatEvery <float> Default 2.0 - After starting, the given
          exception is immediately raised. Then, every #repeatEvery
          seconds, it is raised again, until the thread terminates.
        '''
        threading.Thread.__init__(self)
        self.otherThread = otherThread
        self.exception = exception
        self.repeatEvery = repeatEvery
        # Daemonize so this killer thread never keeps the process alive.
        self.daemon = True

    def run(self):
        '''
        run - The thread main. Will attempt to stop and join the attached
        thread.
        '''
        # Try to silence default exception printing.
        self.otherThread._Thread__stderr = self._stderr
        if hasattr(self.otherThread, '_Thread__stop'):
            # If py2, call this first to start thread termination cleanly.
            # Python3 does not need such ( nor does it provide.. )
            self.otherThread._Thread__stop()
        while self.otherThread.is_alive():
            # We loop raising exception incase it's caught hopefully this
            # breaks us far out.
            ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(self.otherThread.ident), ctypes.py_object(self.exception))
            self.otherThread.join(self.repeatEvery)
        try:
            self._stderr.close()
        except:
            # NOTE(review): bare except kept from the original -- closing the
            # devnull handle is best-effort cleanup and failures are ignored.
            pass
# vim: set ts=4 sw=4 expandtab :
| 39.119403
| 166
| 0.647844
| 650
| 5,242
| 5.161538
| 0.378462
| 0.035768
| 0.008048
| 0.006557
| 0.025037
| 0
| 0
| 0
| 0
| 0
| 0
| 0.01019
| 0.28863
| 5,242
| 133
| 167
| 39.413534
| 0.889515
| 0.630866
| 0
| 0.060606
| 0
| 0
| 0.026531
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.121212
| false
| 0.030303
| 0.090909
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b9174eb28a0ab4b95d4146848ae09c6b7a36f4f
| 1,883
|
py
|
Python
|
network.py
|
YanhengWang/Draughts
|
ad19ccbd3c4fc0defda68c45ed8f2dd14969f2a3
|
[
"Apache-2.0"
] | null | null | null |
network.py
|
YanhengWang/Draughts
|
ad19ccbd3c4fc0defda68c45ed8f2dd14969f2a3
|
[
"Apache-2.0"
] | 1
|
2020-10-12T00:33:54.000Z
|
2020-10-12T00:33:54.000Z
|
network.py
|
YanhengWang/Draughts
|
ad19ccbd3c4fc0defda68c45ed8f2dd14969f2a3
|
[
"Apache-2.0"
] | null | null | null |
from utils import PATH_LABEL
from utils import PATH_DATA_FOLDER
import pickle
import torch
import torch.nn as nn
import torch.utils.data as dat
class ResBlock(nn.Module):
    """Residual block: two 3x3 conv+BN layers with an identity (or 1x1-projected) skip connection."""

    def __init__(self, inChannels, outChannels):
        super(ResBlock, self).__init__()
        # 1x1 convolution to project the skip connection when the channel
        # counts differ; None means a plain identity skip.
        self.matchDimension = None
        if inChannels != outChannels:
            self.matchDimension = nn.Conv2d(inChannels, outChannels, 1, stride=1, bias=False)
        self.conv1 = nn.Conv2d(inChannels, outChannels, 3, stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(outChannels)
        self.conv2 = nn.Conv2d(outChannels, outChannels, 3, stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(outChannels)

    def forward(self, x):
        """Apply conv-bn-relu, conv-bn, add the skip path, then a final relu."""
        out = self.conv1(x)
        out = self.bn1(out)
        out = nn.functional.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        # `is None` (identity test) instead of `== None`: comparing an
        # nn.Module with == is non-idiomatic and relies on object equality.
        if self.matchDimension is None:
            out += x
        else:
            out += self.matchDimension(x)
        out = nn.functional.relu(out)
        return out
class ResNet(nn.Module):
    """Small residual network: stem conv, five 32-channel ResBlocks, 5x5 average pool, linear head with tanh output."""

    def __init__(self):
        super(ResNet, self).__init__()
        self.conv = nn.Conv2d(4, 32, 3, stride = 1, padding = 1, bias = False)
        self.bn = nn.BatchNorm2d(32)
        # Five identical residual blocks chained sequentially.
        self.res = nn.Sequential(*(ResBlock(32, 32) for _ in range(5)))
        self.pool = nn.AvgPool2d(5)
        # NOTE(review): 128 input features implies 10x10 spatial input
        # (32 channels * 2 * 2 after the 5x5 pool) -- confirm with callers.
        self.fc = nn.Linear(128, 1)

    def forward(self, x):
        """Return a tanh-squashed scalar per batch element for 4-channel inputs."""
        features = self.conv(x)
        features = self.bn(features)
        features = nn.functional.relu(features)
        features = self.res(features)
        features = self.pool(features)
        flattened = features.view(features.size()[0], -1)
        score = self.fc(flattened)
        return torch.tanh(score)
class Dataset(dat.Dataset):
    """Torch dataset backed by one pickled sample file per index plus a pickled label list."""

    def __init__(self):
        # Labels for every sample, loaded once up front.
        with open(PATH_LABEL, "rb") as f:
            self.labelList = pickle.load(f)

    def __getitem__(self, index):
        # Each sample lives in its own pickle file named "<index>.dat".
        sample_path = PATH_DATA_FOLDER + str(index) + ".dat"
        with open(sample_path, "rb") as f:
            data = torch.FloatTensor(pickle.load(f))
        label = torch.FloatTensor([self.labelList[index]])
        return data, label

    def __len__(self):
        return len(self.labelList)
| 27.289855
| 92
| 0.677111
| 287
| 1,883
| 4.324042
| 0.268293
| 0.012893
| 0.032232
| 0.045125
| 0.153908
| 0.087832
| 0.087832
| 0.087832
| 0.064464
| 0
| 0
| 0.028479
| 0.179501
| 1,883
| 68
| 93
| 27.691176
| 0.774757
| 0
| 0
| 0.103448
| 0
| 0
| 0.004249
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12069
| false
| 0
| 0.103448
| 0.017241
| 0.344828
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b926fbc1417f4ebd631923a6169eb196f0aff02
| 760
|
py
|
Python
|
WebApp/main/utility/StringUtility.py
|
georg-wenzel/ml-data-smell-detection
|
7dddd401ca1f1a830dfd8b00760659911e5b1086
|
[
"MIT"
] | 1
|
2022-03-29T14:46:40.000Z
|
2022-03-29T14:46:40.000Z
|
WebApp/main/utility/StringUtility.py
|
georg-wenzel/ml-data-smell-detection
|
7dddd401ca1f1a830dfd8b00760659911e5b1086
|
[
"MIT"
] | null | null | null |
WebApp/main/utility/StringUtility.py
|
georg-wenzel/ml-data-smell-detection
|
7dddd401ca1f1a830dfd8b00760659911e5b1086
|
[
"MIT"
] | 1
|
2021-06-13T08:24:46.000Z
|
2021-06-13T08:24:46.000Z
|
# Utility functions for Strings (i.e. storing common strings once)

# Common message templates; the {0} placeholders are filled in by callers
# via str.format.
ERR_MISSING_KEY = "The field(s) {0} must be filled in this form."
ERR_INVALID_KEY = "The field '{0}' contains an invalid value."
ERR_UNAUTHORIZED = "The logged in user does not have access to this value: {0}"
MSG_FINISHED_TRAINING = "Your agent {0} has finished training and can now be used."
# define error string for (multiple) column mismatch
# pass tuple of mismatched columns as defined by AgentUtility.dataset_all_columns_match
def ERR_COLUMN_MISMATCH(columns_mismatched):
    """Build a column-type-mismatch message from (name, expected, actual) tuples."""
    details = ['"' + col[0] + '": Expected ' + str(col[1]) + ', but was ' + str(col[2]) + '.'
               for col in columns_mismatched]
    return "Column type mismatch: " + " ".join(details)
| 54.285714
| 138
| 0.709211
| 114
| 760
| 4.605263
| 0.666667
| 0.049524
| 0.041905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011111
| 0.171053
| 760
| 14
| 139
| 54.285714
| 0.822222
| 0.289474
| 0
| 0
| 0
| 0
| 0.481343
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b94d42c4e5c72f5294c49e1d55e12f33a9b3855
| 2,027
|
py
|
Python
|
visualization/draw.py
|
DougMHu/roomba-obstacle-mapping
|
019b6108c1967c7daabe7b4795cfac7ef0e79cf7
|
[
"MIT"
] | 3
|
2018-05-26T20:41:27.000Z
|
2020-10-19T12:40:42.000Z
|
visualization/draw.py
|
DougMHu/roomba-obstacle-mapping
|
019b6108c1967c7daabe7b4795cfac7ef0e79cf7
|
[
"MIT"
] | null | null | null |
visualization/draw.py
|
DougMHu/roomba-obstacle-mapping
|
019b6108c1967c7daabe7b4795cfac7ef0e79cf7
|
[
"MIT"
] | 1
|
2017-01-31T09:47:21.000Z
|
2017-01-31T09:47:21.000Z
|
# MIT License
# Copyright (c) 2016 Aashiq Ahmed, Shuai Chen, Meha Deora, Douglas Hu
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import turtle

# Create the drawing window and a turtle to draw with.
turtle.setup(800,800)
wn = turtle.Screen()
doug = turtle.Turtle()
# Draw a grid
# Grid geometry: `length` x `width` cells, each `step` pixels on a side.
length = 5
width = 2
step = 50
def draw_row(width, mark):
    """Draw one row of `width` square cells; cells whose mark[i] is truthy are filled."""
    doug.up()
    for i in range(width):
        if (mark[i]):
            # Pen down and start a fill only for marked cells.
            doug.down()
            doug.begin_fill()
        # Trace the four sides of one `step`-sized square.
        for j in range(4):
            doug.fd(step)
            doug.right(90)
        if (mark[i]):
            doug.end_fill()
        # Advance to the next cell in the row.
        doug.fd(step)
    doug.up()
#draw_row(width,[1,0])
def draw_matrix(mark):
    """Draw a 2D matrix of cells; truthy entries are filled in red."""
    doug.up()
    # NOTE(review): `rows` is computed but never used below.
    rows = len(mark)
    cols = len(mark[0])
    # Remember the current fill color so it can be restored afterwards.
    orig = doug.fillcolor()
    doug.fillcolor('red')
    for row in mark:
        draw_row(cols, row)
        # Return to the row's left edge, then move down one row.
        doug.fd(-cols*step)
        doug.right(90)
        doug.fd(step)
        doug.left(90)
    doug.fillcolor(orig)
# Demo: draw a 2x2 matrix with three filled cells.
draw_matrix([[0,1],[1,1]])
# (Commented-out experiments kept from the original.)
# doug.left(90)
# doug.fd((width-0.5)*step)
# doug.right(90)
# doug.up()
# doug.fd(0.5*step)
# doug.down()
# doug.pensize(step)
# doug.fd((length-1)*step)
# Keep the window open until it is closed by the user.
turtle.getscreen()._root.mainloop()
#doug.fd(length*step)
| 25.658228
| 80
| 0.716823
| 330
| 2,027
| 4.378788
| 0.448485
| 0.0609
| 0.020761
| 0.029066
| 0.026298
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021958
| 0.168722
| 2,027
| 78
| 81
| 25.987179
| 0.835608
| 0.637395
| 0
| 0.285714
| 0
| 0
| 0.004249
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057143
| false
| 0
| 0.028571
| 0
| 0.085714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b9ade24b6474ca9ac882881c781ddb3dc8e1ab1
| 2,180
|
py
|
Python
|
activate.py
|
cassidoxa/bottoman
|
9d04331794ffb8bb745fc175c15db6d4a1f5714c
|
[
"MIT"
] | null | null | null |
activate.py
|
cassidoxa/bottoman
|
9d04331794ffb8bb745fc175c15db6d4a1f5714c
|
[
"MIT"
] | null | null | null |
activate.py
|
cassidoxa/bottoman
|
9d04331794ffb8bb745fc175c15db6d4a1f5714c
|
[
"MIT"
] | null | null | null |
import json
import urllib.request
from bottoman import TwitchBot
import config
from db.db import DatabaseManager
def get_user_id_display(user):
    """
    uses twitch's API to get a user's token with their (case insensitive)
    user name

    Returns a (user_id, display_name) tuple taken from the first entry of
    the Twitch Helix /users response.
    """
    client_id = config.client_id
    # SECURITY NOTE(review): OAuth bearer token is hard-coded in source; it
    # should live in config/environment and be rotated.
    token = "srtajsl3jjbhhtfrvk0dlsu33aytv2"
    header = {"Client-Id": client_id, "Authorization": f'Bearer {token}'}
    url = f'https://api.twitch.tv/helix/users?login={user}'
    req = urllib.request.Request(url, headers=header)
    # NOTE(review): the response object is never explicitly closed; consider
    # a with-statement / contextlib.closing.
    response = urllib.request.urlopen(req).read().decode('utf-8')
    response = json.loads(response)
    return (int(response['data'][0]['id']),
            response['data'][0]['display_name'])
def check_admin():
    """
    check for an admin. If no admin user, ask for one and add
    to chatters db.
    """
    dbmgr = DatabaseManager('db/bottoman.db')
    # Collect the permissions column for every known chatter.
    permissions_list = [i[0] for i in dbmgr.query(
        "SELECT permissions \
        FROM chatters").fetchall()]
    if 'admin' in permissions_list:
        return
    else:
        admin_flag = False
        # Prompt interactively until the operator confirms an admin name.
        while admin_flag is False:
            admin = input(f"This bot has no admin. Please enter the name of \
your twitch channel: ")
            double_check = input(f'The admin account will be {admin}. Is \
this correct? (y/n): ')
            if double_check.lower() == "y":
                # Resolve the channel name to a Twitch user id + display name.
                user_id, name = get_user_id_display(admin)
                dbmgr.write(
                    "INSERT INTO chatters \
                    VALUES (?,?,?,?,?,?)",
                    (user_id, name.lower(), name, 'admin', 1, 0,))
                dbmgr.close()
                admin_flag = True
            elif double_check.lower() == "n":
                continue
            else:
                print(f"Please try again.")
                continue
    return
# check for admin, initialize bot, join room, send hello message
check_admin()
bottoman = TwitchBot()
# Join the configured channel using the bot's own socket, then greet.
bottoman.join_room(bottoman.s)
bottoman.send_message(config.join_msg)
bottoman.run_time()
| 29.459459
| 77
| 0.559174
| 253
| 2,180
| 4.715415
| 0.454545
| 0.020117
| 0.015088
| 0.026823
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007524
| 0.329358
| 2,180
| 73
| 78
| 29.863014
| 0.808482
| 0.099541
| 0
| 0.125
| 0
| 0
| 0.094595
| 0.015593
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0.104167
| 0
| 0.208333
| 0.020833
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b9e8192f42c44f946f004808b8e37a13a83e0b0
| 478
|
py
|
Python
|
waveshare_snake.py
|
AndrewCarterUK/MiniGame
|
6d699c045e84ee3834f23eb0483245195438eff7
|
[
"MIT"
] | null | null | null |
waveshare_snake.py
|
AndrewCarterUK/MiniGame
|
6d699c045e84ee3834f23eb0483245195438eff7
|
[
"MIT"
] | null | null | null |
waveshare_snake.py
|
AndrewCarterUK/MiniGame
|
6d699c045e84ee3834f23eb0483245195438eff7
|
[
"MIT"
] | null | null | null |
from minigame.waveshare.button import Button
from minigame.waveshare.display import Display
from minigame.games.snake import Snake
# Board dimensions in blocks and seconds per game tick.
WIDTH = 20
HEIGHT = 20
STEP_TIME = 0.5
# Pixel size of one block; NOTE(review): not referenced in main() below.
BLOCK_SIZE = 32
def main():
    """Wire up the waveshare display and four direction buttons, then start a snake game."""
    display = Display()
    # GPIO pin assignments for the direction buttons.
    left = Button(5)
    right = Button(26)
    up = Button(6)
    down = Button(19)
    game = Snake(display, WIDTH, HEIGHT, STEP_TIME, left, right, up, down)
    game.play()


if __name__ == '__main__':
    main()
| 19.916667
| 92
| 0.698745
| 70
| 478
| 4.5
| 0.428571
| 0.152381
| 0.133333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.036554
| 0.198745
| 478
| 23
| 93
| 20.782609
| 0.785901
| 0
| 0
| 0
| 0
| 0
| 0.016736
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0.176471
| 0
| 0.235294
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9b9f8f5cfe54f976ada38f7cf0db4c9ffc2c1571
| 8,636
|
py
|
Python
|
jumodjango/urls.py
|
jumoconnect/openjumo
|
828d993bfbb83777d10a68de6964c7d5bb2c7bd0
|
[
"MIT"
] | 5
|
2015-03-11T18:59:46.000Z
|
2018-08-17T17:49:45.000Z
|
jumodjango/urls.py
|
kmrifat/openjumo
|
828d993bfbb83777d10a68de6964c7d5bb2c7bd0
|
[
"MIT"
] | 2
|
2020-06-05T16:52:17.000Z
|
2021-02-08T20:24:26.000Z
|
jumodjango/urls.py
|
kmrifat/openjumo
|
828d993bfbb83777d10a68de6964c7d5bb2c7bd0
|
[
"MIT"
] | 6
|
2016-02-04T00:45:30.000Z
|
2021-07-07T17:14:50.000Z
|
from api.api_v1 import api_urls
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
)
''' RANDOM URLs '''
urlpatterns += patterns('etc.views',
url(r'^about/?$', 'about', name = 'about'),
url(r'^help/?$', 'help', name = 'help'),
url(r'^jobs/?$', 'jobs', name = 'jobs'),
url(r'^team/?$', 'team', name = 'team'),
url(r'^blog/?$', 'blog', name = 'blog'),
url(r'^contact/?$', 'contact', name = 'contact'),
url(r'^privacy/?$', 'privacy', name = 'privacy'),
url(r'^terms/?$', 'terms', name = 'terms'),
url(r'^/?$', 'index', name = 'index'),
url(r'^error/?$', 'throw_error', name = 'throw_error'),
url(r'^health_check/?$', 'health_check', name = 'health_check'),
)
''' END OF RANDOM URLs '''
''' API URLS '''
urlpatterns += patterns('',
(r'^api/', include(api_urls())),
)
''' END API URLS '''
''' USER URLs '''
urlpatterns += patterns('users.views',
url(r'^login/?$', 'login_permalink', name = 'login_permalink'),
url(r'^logout/?$', 'logout_permalink', name = 'logout_permalink'),
url(r'^setup/?$', 'setup', name = 'setup'),
url(r'^discover/?$', 'discover', name = 'discover'),
url(r'^user/(?P<mongo_id>[a-zA-Z0-9\-_].*)/?$', 'old_user_permalink', name = 'old_user_permalink'),
url(r'^forgot_password/?$', 'forgot_password', name = 'forgot_password'),
url(r'^reset_password/(?P<reset_id>[a-fA-F0-9].*)/?$', 'reset_password', name = 'reset_password'),
url(r'^upload_photo/?$', 'upload_photo', name = 'upload_photo'),
url(r'^settings/?$', 'settings', name='settings'),
url(r'^settings/notifications/?$', 'notifications', name='settings_notifications'),
url(r'^settings/connect/?$', 'connect', name='settings_connect'),
url(r'^settings/developer/?$', 'developer', name='settings_developer'),
url(r'^users/(?P<user_id>\d*)/follow/?$', 'follow', name='follow_user'),
url(r'^users/(?P<user_id>\d*)/unfollow/?$', 'unfollow', name='unfollow_user'),
url(r'^users/(?P<user_id>\d*)/followers/?$', 'follower_list', name='user_followers'),
url(r'^users/(?P<user_id>\d*)/followings/?$', 'following_list', name='user_followings'),
url(r'^remove/?$', 'remove_user', name='remove_user')
)
urlpatterns += patterns('users.ajax.views',
url(r'^json/v1/user/fbid_check/?$', 'check_fbid', name = 'check_fbid'),
url(r'^json/v1/user/fb_login/?$', 'fb_login', name = 'fb_login'),
url(r'^json/v1/user/fbot_update/?$', 'fbot_update', name = 'fbot_update'),
url(r'^json/v1/user/update/?$', 'update_user', name = 'update_user'),
url(r'^json/v1/user/remove/?$', 'remove_user', name = 'remove_user'),
url(r'^json/v1/user/reset_password/?$', 'reset_password', name = 'reset_password'),
url(r'^json/v1/user/forgot_password/?$', 'forgot_password', name = 'forgot_password'),
url(r'^json/v1/user/action/follow/?$', 'follow', name = 'follow'),
)
''' END OF USER URLs '''
''' ISSUE URLs '''
urlpatterns += patterns('issue.views',
url(r'^issue/(?P<mongo_id>[a-zA-Z0-9\-_].*)/?$', 'old_issue_permalink', name = 'old_issue_permalink'),
url(r'^issuename/(?P<issuename>[a-zA-Z0-9\-_\ ].*)/?$', 'old_issuename_permalink', name = 'old_issuename_permalink'),
url(r'^users/(?P<user_id>\d*)/issues/?$', 'followed_issue_list', name='followed_issue_list')
)
''' ISSUE URLs '''
''' ORG URLs '''
urlpatterns += patterns('org.views',
url(r'^org/categories.js$', 'org_categories', name = 'org_categories'),
url(r'^org/claim/(?P<org_id>[0-9a-zA-Z\-_].*)/confirm/?$', 'claim_org_confirm', name = 'claim_org_confirm'),
url(r'^org/claim/(?P<org_id>[0-9a-zA-Z\-_].*)/?$', 'claim_org', name = 'claim_org'),
url(r'^org/create/?$', 'create_org', name = 'create_org'),
url(r'^org/(?P<org_id>\d.*)/details/?$', 'details', name='details_org'),
url(r'^org/(?P<org_id>[0-9a-zA-Z\-_].*)/manage/?$', 'manage_org', {'tab': 'about'}, name='manage_org'),
url(r'^org/(?P<org_id>[0-9a-zA-Z\-_].*)/manage/connect/?$', 'manage_org', {'tab': 'connect'}, name='manage_org_connect'),
url(r'^org/(?P<org_id>[0-9a-zA-Z\-_].*)/manage/more/?$', 'manage_org', {'tab': 'more'}, name='manage_org_more'),
url(r'^org/(?P<mongo_id>[a-zA-Z0-9\-_].*)/?$', 'old_org_permalink', name = 'old_org_permalink'),
url(r'^orgname/(?P<orgname>[a-zA-Z0-9\-_\ ].*)/?$', 'old_orgname_permalink', name = 'old_orgname_permalink'),
url(r'^users/(?P<user_id>\d*)/orgs/?$', 'followed_org_list', name='followed_org_list')
)
urlpatterns += patterns('org.ajax.views',
url(r'^json/v1/org/fetch_centroid/?$', 'fetch_org_by_centroid', name = 'fetch_org_by_centroid'),
url(r'^json/v1/org/update/?$', 'update_org', name = 'update_org'),
url(r'^json/v1/org/remove/?$', 'remove_org', name = 'remove_org'),
url(r'^json/v1/org/flag/?$', 'flag_org', name = 'flag_org'),
url(r'^json/v1/org/create/?$', 'org_create', name = 'org_create'),
url(r'^json/v1/org/normalize_facebook_id/?$', 'normalize_facebook_id', name = 'normalize_facebook_id'),
)
''' END OF ORG URLs '''
''' COMMITMENT URLS '''
urlpatterns += patterns('commitment.views',
url(r'^commitments/create/?$', 'create', name='create_commitment'),
url(r'^commitments/(?P<commitment_id>\d*)/delete/?$', 'delete', name='delete_commitment'),
url(r'^orgs/(?P<entity_id>\d*)/commitments/?$', 'list', {'model_name': 'org.Org'}, name='org_commitments'),
url(r'^issues/(?P<entity_id>\d*)/commitments/?$', 'list', {'model_name': 'issue.Issue'}, name='issue_commitments'),
)
''' ACTION URLS '''
urlpatterns += patterns('action.views',
url(r'^orgs/(?P<entity_id>\d*)/actions/?$', 'action_list', {'model_name': 'org.Org'}, name='org_action_list'),
url(r'^issues/(?P<entity_id>\d*)/actions/?$', 'action_list', {'model_name': 'issue.Issue'}, name='issue_action_list'),
)
''' SEARCH URLS '''
urlpatterns += patterns('search.views',
url(r'^json/v1/search/onebox/?$', 'autocomplete', name = 'autocomplete'),
url(r'^search/?$', 'search_page', name='search_page'),
url(r'^json/v1/search/?$', 'ajax_search', name='ajax_search'),
url(r'^json/v1/autocomplete/?$', 'ajax_term_complete', name='ajax_term_complete')
)
''' MAILER URLS '''
urlpatterns += patterns('mailer.views',
url(r'^unsubscribe/$', 'unsubscribe', name='unsubscribe'),
url(r'^email/text/(?P<username>[a-zA-Z0-9\-_\ ].*)/?$', 'jumo_reader', name = 'jumo_reader'),
url(r'^email/(?P<username>[a-zA-Z0-9\-_\ ].*)/?$', 'jumo_reader', name = 'jumo_reader'),
#url(r'^notification/(?P<username>[a-zA-Z0-9\-_\ ].*)/?$', 'notification_email', name = 'notification_email'),
)
''' END MAILER URLS '''
''' ADMIN URLS '''
urlpatterns += patterns('',
(r'^admin/org/report/$', 'org.admin_views.report'),
(r'^grappelli/', include('grappelli.urls')),
(r'^admin/', include(admin.site.urls)),
)
if settings.IS_DATAMINE:
urlpatterns += patterns('miner.views',
url(r'^related_searches/?$', 'related_searches', name='related_searches')
)
# Serve /static/ through Django's static view in development only.
# BUGFIX: the DEBUG guard had been commented out and replaced with
# `if True:`, a leftover debug hack that force-enabled app-server static
# file serving (with directory indexes) in every environment, including
# production. Restored the intended settings.DEBUG guard.
# NOTE(review): this serves MEDIA_ROOT under the /static/ prefix — confirm
# that mapping is intentional.
if settings.DEBUG:
    urlpatterns += patterns('django.views.static',
        (r'^static/(?P<path>.*)$',
            'serve', {
                'document_root': settings.MEDIA_ROOT,
                'show_indexes': True }),)
# Custom error handlers (dotted paths resolved by Django's URL machinery).
handler500 = 'etc.views.error_500'
handler404 = 'etc.views.error_404'
'''
#########################################################################################
### HEY #########################################################################
################################################## SEE ALL THEM POUND SIGNS? ############
#########################################################################################
############### THAT MEANS THIS IS AN IMPORTANT MSG #####################################
#########################################################################################
################################# SO PAY ATTENTION OK? ##################################
#########################################################################################
####### EVERYTHING WILL BREAK IF THIS ISN'T THE LAST LINE OF CODE IN THIS FILE. #
#########################################################################################
################################## WE COOL? #############################################
#########################################################################################
'''
# Catch-all vanity-URL route: matches essentially any remaining path and
# hands it to `clean_url` for entity resolution. Because it matches almost
# everything, it MUST stay the last pattern registered in this file.
urlpatterns += patterns('etc.views',
    url(r'^([a-zA-Z0-9\-_].*)/?$', 'clean_url', name = 'entity_url'),
)
| 45.452632
| 125
| 0.54736
| 1,036
| 8,636
| 4.375483
| 0.168919
| 0.063534
| 0.030002
| 0.037503
| 0.269799
| 0.241121
| 0.180454
| 0.132363
| 0.085374
| 0.04743
| 0
| 0.007947
| 0.125753
| 8,636
| 189
| 126
| 45.693122
| 0.59245
| 0.014706
| 0
| 0.033613
| 0
| 0.02521
| 0.554149
| 0.240225
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.033613
| 0.033613
| 0
| 0.033613
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
9ba08896288342be18a3bdfe4d777157062a927c
| 2,830
|
py
|
Python
|
modules/layers.py
|
vliu15/munit
|
5789d96590519d729f89c9501eba7692fa7054ef
|
[
"MIT"
] | 3
|
2021-03-04T01:48:03.000Z
|
2021-12-16T06:55:10.000Z
|
modules/layers.py
|
vliu15/munit
|
5789d96590519d729f89c9501eba7692fa7054ef
|
[
"MIT"
] | null | null | null |
modules/layers.py
|
vliu15/munit
|
5789d96590519d729f89c9501eba7692fa7054ef
|
[
"MIT"
] | null | null | null |
# The MIT License
#
# Copyright (c) 2020 Vincent Liu
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import torch
import torch.nn as nn
class AdaptiveInstanceNorm2d(nn.Module):
    ''' Implements 2D Adaptive Instance Normalization (AdaIN).

    Each feature map is instance-normalized (no learned affine), then scaled
    and shifted per channel using statistics predicted from a style code by
    two small MLPs.

    Args:
        channels: number of feature channels in the input image tensor
        s_dim: dimensionality of the style code (default 8)
        h_dim: hidden width of the style MLPs (default 256)
    '''

    def __init__(self, channels, s_dim=8, h_dim=256):
        super().__init__()
        self.instance_norm = nn.InstanceNorm2d(channels, affine=False)
        # Two independent MLPs map the style code to per-channel scale/shift.
        self.style_scale_transform = self.mlp(s_dim, h_dim, channels)
        self.style_shift_transform = self.mlp(s_dim, h_dim, channels)

    @staticmethod
    def mlp(in_dim, h_dim, out_dim):
        ''' Builds a 3-layer ReLU MLP: in_dim -> h_dim -> h_dim -> out_dim.

        BUGFIX: the original signature was ``mlp(self, in_dim, h_dim, out_dim)``
        on a @staticmethod; since no instance is bound, the call
        ``self.mlp(s_dim, h_dim, channels)`` supplied only three of four
        required arguments and __init__ raised TypeError. The stray ``self``
        parameter is removed.
        '''
        return nn.Sequential(
            nn.Linear(in_dim, h_dim),
            nn.ReLU(inplace=True),
            nn.Linear(h_dim, h_dim),
            nn.ReLU(inplace=True),
            nn.Linear(h_dim, out_dim),
        )

    def forward(self, image, w):
        ''' Applies AdaIN to ``image`` (N, C, H, W) using style code ``w``
        (N, s_dim); returns a tensor of the same shape as ``image``. '''
        normalized_image = self.instance_norm(image)
        # Broadcast the (N, C) predictions over the spatial dimensions.
        style_scale = self.style_scale_transform(w)[:, :, None, None]
        style_shift = self.style_shift_transform(w)[:, :, None, None]
        transformed_image = style_scale * normalized_image + style_shift
        return transformed_image
class LayerNorm2d(nn.Module):
    ''' Implements 2D Layer Normalization.

    Normalizes each sample over all of its (C, H, W) elements using that
    sample's mean and standard deviation, optionally followed by a learned
    per-channel affine transform.

    Args:
        channels: number of feature channels (size of gamma/beta when affine)
        eps: small constant added to the std for numerical stability
        affine: whether to learn per-channel scale (gamma) and shift (beta)
    '''

    def __init__(self, channels, eps=1e-5, affine=True):
        super().__init__()
        self.affine = affine
        self.eps = eps
        if self.affine:
            # Per-channel scale (uniform-random init) and shift (zero init).
            self.gamma = nn.Parameter(torch.rand(channels))
            self.beta = nn.Parameter(torch.zeros(channels))

    def forward(self, x):
        ''' Normalizes ``x`` (N, C, H, W) per sample; returns same shape. '''
        flat = x.flatten(1)
        mu = flat.mean(1).reshape(-1, 1, 1, 1)
        sigma = flat.std(1).reshape(-1, 1, 1, 1)
        normalized = (x - mu) / (sigma + self.eps)
        if not self.affine:
            return normalized
        scale = self.gamma.reshape(1, -1, 1, 1)
        shift = self.beta.reshape(1, -1, 1, 1)
        return normalized * scale + shift
| 36.753247
| 84
| 0.674558
| 403
| 2,830
| 4.615385
| 0.389578
| 0.012903
| 0.012903
| 0.021505
| 0.130645
| 0.084409
| 0.071505
| 0.071505
| 0.037097
| 0.037097
| 0
| 0.016004
| 0.227208
| 2,830
| 76
| 85
| 37.236842
| 0.834476
| 0.40636
| 0
| 0.157895
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.131579
| false
| 0
| 0.052632
| 0.026316
| 0.315789
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|