max_stars_repo_path stringlengths 4 286 | max_stars_repo_name stringlengths 5 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.03M | content_cleaned stringlengths 6 1.03M | language stringclasses 111 values | language_score float64 0.03 1 | comments stringlengths 0 556k | edu_score float64 0.32 5.03 | edu_int_score int64 0 5 |
|---|---|---|---|---|---|---|---|---|---|---|
ramsey/RamseyGame.py | bzhaocaltech/alpha-zero-ramsey-numbers | 0 | 6619051 | from copy import deepcopy
import sys
sys.path.append('..')
from Game import Game
from Graph import Graph
class RamseyGame(Game):
    """Ramsey-number game on an n-vertex graph with two clique targets p and q."""

    def __init__(self, n, p, q):
        assert p >= 2 and q >= 2
        self.n = n
        # Color keys must be the consecutive integers [1, 2, ...] for indexing.
        self.colors = {1: p, 2: q}
        # Flat table mapping an action index to its (i, j, color) triple.
        self.index_to_action = [
            (i, j, c)
            for c in self.colors
            for i in range(n)
            for j in range(i + 1, n)
        ]

    def action_to_index(self, i, j, c):
        """Inverse of ``index_to_action``: map (i, j, c) to its action-vector index."""
        assert i != j
        if i > j:
            i, j = j, i
        color_offset = (c - 1) * self.n * (self.n - 1)
        pair_offset = (i + 1) * (i + 2)
        return self.n * i + j + (color_offset - pair_offset) // 2

    def getInitGraph(self):
        """Return a graph representing the initial (empty) board state."""
        return Graph(self.n)

    def getGraphSize(self):
        """Return the number of vertices in the game graph."""
        return self.n

    def getActionSize(self):
        """Return the total number of distinct actions."""
        return len(self.index_to_action)

    def getNextStateFromAction(self, graph, action):
        """Return a copy of *graph* with the edge of *action* colored."""
        i, j, c = action
        successor = deepcopy(graph)
        successor.colorEdge(i, j, c, self.colors)
        return successor

    def getNextState(self, graph, index):
        """Return a new graph after applying the action at *index*."""
        return self.getNextStateFromAction(graph, self.index_to_action[index])

    def getValidMoves(self, graph):
        """Return a fixed-size binary vector marking every currently valid action."""
        valid = [0] * self.getActionSize()
        for u, v in graph.edgeIter():
            if graph.hasEdge(u, v):
                continue
            # An uncolored edge may be colored with any available color.
            for color in self.colors:
                valid[self.action_to_index(u, v, color)] = 1
        return valid

    def getGameEnded(self, graph):
        """The game ends on a monochromatic clique or when every edge is colored."""
        if graph.has_clique:
            return True
        return graph.num_edges == graph.total_edges

    def stringRepresentation(self, graph):
        """Return a canonical string form of *graph* (e.g. for hashing states)."""
        return str(graph)

    def getScore(self, graph):
        """Score = number of colored edges, minus one if a clique was formed."""
        score = graph.num_edges
        if graph.has_clique:
            return score - 1
        return score

    def getCanonicalForm(self, graph):
        """Return the adjacency-matrix representation used as network input."""
        return graph.adj_mat
| from copy import deepcopy
import sys
sys.path.append('..')
from Game import Game
from Graph import Graph
class RamseyGame(Game):
    """Ramsey-number game on an n-vertex graph with two clique targets p and q."""

    def __init__(self, n, p, q):
        assert p >= 2 and q >= 2
        self.n = n
        # Color keys must be the consecutive integers [1, 2, ...] for indexing.
        self.colors = {1: p, 2: q}
        # Flat table mapping an action index to its (i, j, color) triple.
        self.index_to_action = [
            (i, j, c)
            for c in self.colors
            for i in range(n)
            for j in range(i + 1, n)
        ]

    def action_to_index(self, i, j, c):
        """Inverse of ``index_to_action``: map (i, j, c) to its action-vector index."""
        assert i != j
        if i > j:
            i, j = j, i
        color_offset = (c - 1) * self.n * (self.n - 1)
        pair_offset = (i + 1) * (i + 2)
        return self.n * i + j + (color_offset - pair_offset) // 2

    def getInitGraph(self):
        """Return a graph representing the initial (empty) board state."""
        return Graph(self.n)

    def getGraphSize(self):
        """Return the number of vertices in the game graph."""
        return self.n

    def getActionSize(self):
        """Return the total number of distinct actions."""
        return len(self.index_to_action)

    def getNextStateFromAction(self, graph, action):
        """Return a copy of *graph* with the edge of *action* colored."""
        i, j, c = action
        successor = deepcopy(graph)
        successor.colorEdge(i, j, c, self.colors)
        return successor

    def getNextState(self, graph, index):
        """Return a new graph after applying the action at *index*."""
        return self.getNextStateFromAction(graph, self.index_to_action[index])

    def getValidMoves(self, graph):
        """Return a fixed-size binary vector marking every currently valid action."""
        valid = [0] * self.getActionSize()
        for u, v in graph.edgeIter():
            if graph.hasEdge(u, v):
                continue
            # An uncolored edge may be colored with any available color.
            for color in self.colors:
                valid[self.action_to_index(u, v, color)] = 1
        return valid

    def getGameEnded(self, graph):
        """The game ends on a monochromatic clique or when every edge is colored."""
        if graph.has_clique:
            return True
        return graph.num_edges == graph.total_edges

    def stringRepresentation(self, graph):
        """Return a canonical string form of *graph* (e.g. for hashing states)."""
        return str(graph)

    def getScore(self, graph):
        """Score = number of colored edges, minus one if a clique was formed."""
        score = graph.num_edges
        if graph.has_clique:
            return score - 1
        return score

    def getCanonicalForm(self, graph):
        """Return the adjacency-matrix representation used as network input."""
        return graph.adj_mat
| en | 0.864355 | # For indexing purposes colors keys must be [1, 2, ...] # Maps index to action # Maps action to index in action vector # Return a graph representing the initial board state # return initial graph (numpy graph) # Get the graph size # Given a graph and an action, returns a new graph after that action has # been made # Get all valid actions one-hot encoded # return a fixed size binary vector # Check if state is terminal by checking for monochromatic cliques of given size and color # and if there are uncolored edges remaining # Get the score of a graph. Equivalent to number of edges in the graph # minus an additional 1 if the graph has a clique | 3.559191 | 4 |
src/nrl/walker/walkers.py | cthoyt/nrl | 3 | 6619052 | <reponame>cthoyt/nrl<filename>src/nrl/walker/walkers.py
# -*- coding: utf-8 -*-
"""Implementations of random walk algorithms."""
import random
import igraph
import networkx
import numpy as np
from .utils import Walker
from ..typing import Walk
__all__ = [
'StandardRandomWalker',
'RestartingRandomWalker',
'BiasedRandomWalker',
]
class StandardRandomWalker(Walker):
    """Make standard random walks, choosing the neighbors at a given position uniformly."""

    def get_igraph_walk(self, graph: igraph.Graph, vertex: igraph.Vertex) -> Walk:
        """Get a random walk by choosing from the neighbors at a given position uniformly."""
        tail = vertex
        yield tail['label']
        path_length = 1
        # Stop when the path is long enough or the walk reaches a dead end.
        # NOTE: the old guard used graph.neighborhood_size(tail), which counts
        # the vertex itself (always >= 1), so random.choice([]) could raise
        # IndexError on an isolated vertex.
        while path_length < self.parameters.max_path_length:
            neighbors = tail.neighbors()
            if not neighbors:
                break
            tail = random.choice(neighbors)
            yield tail['label']
            path_length += 1

    def get_networkx_walk(self, graph: networkx.Graph, vertex: str) -> Walk:
        """Get a random walk by choosing from the neighbors at a given position uniformly."""
        tail = vertex
        yield tail
        path_length = 1
        # networkx's Graph.neighbors() returns an iterator, which is always
        # truthy -- it must be materialized so empty neighborhoods end the
        # walk instead of crashing random.choice() with an IndexError.
        while path_length < self.parameters.max_path_length:
            neighbors = list(graph.neighbors(tail))
            if not neighbors:
                break
            tail = random.choice(neighbors)
            yield tail
            path_length += 1
class RestartingRandomWalker(Walker):
    """A random walker that restarts from the original vertex with a given probability."""

    @property
    def restart_probability(self) -> float:
        """Get the probability with which this walker will restart from the original vertex."""
        return self.parameters.restart_probability

    def get_igraph_walk(self, graph: igraph.Graph, vertex: igraph.Vertex) -> Walk:
        """Generate one random walk for one vertex, with the probability, alpha, of restarting."""
        tail = vertex
        yield tail['label']
        path_length = 1
        while path_length < self.parameters.max_path_length:
            # Restart with the configured probability. The previous comparison
            # (restart_probability <= random.random()) was inverted and
            # restarted with probability 1 - alpha instead of alpha.
            if random.random() < self.restart_probability:
                tail = vertex
            else:
                neighbors = tail.neighbors()
                if not neighbors:
                    # Dead end and no restart drawn -- terminate the walk.
                    break
                tail = random.choice(neighbors)
            yield tail['label']
            path_length += 1

    def get_networkx_walk(self, graph: networkx.Graph, vertex: str) -> Walk:
        """Generate one random walk for one vertex, with the probability, alpha, of restarting."""
        tail = vertex
        yield tail
        path_length = 1
        while path_length < self.parameters.max_path_length:
            if random.random() < self.restart_probability:
                tail = vertex
            else:
                # Graph.neighbors() returns an iterator (always truthy) --
                # materialize it to detect dead ends.
                neighbors = list(graph.neighbors(tail))
                if not neighbors:
                    break
                tail = random.choice(neighbors)
            yield tail
            path_length += 1
class BiasedRandomWalker(Walker):
    """A random walker that generates second-order random walks biased by edge weights."""

    # Keys used to look up per-vertex overrides and precomputed distributions
    # in the sampling strategy / vertex attributes.
    NUM_WALKS_KEY = 'num_walks'
    WALK_LENGTH_KEY = 'walk_length'
    PROBABILITIES_KEY = 'probabilities'
    FIRST_TRAVEL_KEY = 'first_travel_key'

    @property
    def sampling_strategy(self):
        """Get the sampling strategy for this walker."""
        return self.parameters.sampling_strategy

    def _check(self, vertex):
        """Return True when walk generation for *vertex* should be skipped per the sampling strategy."""
        return (
            vertex in self.sampling_strategy and
            self.NUM_WALKS_KEY in self.sampling_strategy[vertex] and
            self.sampling_strategy[vertex][self.NUM_WALKS_KEY] <= self.parameters.number_paths
        )

    def get_igraph_walk(self, graph: igraph.Graph, vertex: igraph.Vertex) -> Walk:
        """Generate second-order random walks biased by edge weights."""
        if self.parameters.max_path_length < 2:
            raise ValueError("The path length for random walk is less than 2, which doesn't make sense")
        if self._check(vertex):
            return
        # Start walk
        yield vertex
        double_tail = vertex
        # Calculate walk length (per-vertex override, else global maximum)
        if vertex in self.sampling_strategy:
            walk_length = self.sampling_strategy[vertex].get(self.WALK_LENGTH_KEY, self.parameters.max_path_length)
        else:
            walk_length = self.parameters.max_path_length
        # First hop uses the precomputed first-travel distribution.
        # NOTE(review): assumes vertex[FIRST_TRAVEL_KEY] is aligned with the
        # ordering of vertex.neighbors() -- confirm against the preprocessing.
        probabilities = vertex[self.FIRST_TRAVEL_KEY]
        tail = np.random.choice(vertex.neighbors(), p=probabilities)
        if not tail:
            return
        yield tail
        # Perform walk
        path_length = 2
        while path_length < walk_length:
            neighbors = tail.neighbors()
            # Skip dead end nodes
            if not neighbors:
                break
            # Second-order transition: distribution conditioned on the
            # previous vertex (double_tail).
            probabilities = tail[self.PROBABILITIES_KEY][double_tail['name']]
            double_tail, tail = tail, np.random.choice(neighbors, p=probabilities)
            yield tail
            path_length += 1

    def get_networkx_walk(self, graph: networkx.Graph, vertex: str) -> Walk:
        """Not implemented for networkx graphs."""
        raise NotImplementedError
| # -*- coding: utf-8 -*-
"""Implementations of random walk algorithms."""
import random
import igraph
import networkx
import numpy as np
from .utils import Walker
from ..typing import Walk
__all__ = [
'StandardRandomWalker',
'RestartingRandomWalker',
'BiasedRandomWalker',
]
class StandardRandomWalker(Walker):
    """Make standard random walks, choosing the neighbors at a given position uniformly."""

    def get_igraph_walk(self, graph: igraph.Graph, vertex: igraph.Vertex) -> Walk:
        """Get a random walk by choosing from the neighbors at a given position uniformly."""
        tail = vertex
        yield tail['label']
        path_length = 1
        # Stop when the path is long enough or the walk reaches a dead end.
        # NOTE: the old guard used graph.neighborhood_size(tail), which counts
        # the vertex itself (always >= 1), so random.choice([]) could raise
        # IndexError on an isolated vertex.
        while path_length < self.parameters.max_path_length:
            neighbors = tail.neighbors()
            if not neighbors:
                break
            tail = random.choice(neighbors)
            yield tail['label']
            path_length += 1

    def get_networkx_walk(self, graph: networkx.Graph, vertex: str) -> Walk:
        """Get a random walk by choosing from the neighbors at a given position uniformly."""
        tail = vertex
        yield tail
        path_length = 1
        # networkx's Graph.neighbors() returns an iterator, which is always
        # truthy -- it must be materialized so empty neighborhoods end the
        # walk instead of crashing random.choice() with an IndexError.
        while path_length < self.parameters.max_path_length:
            neighbors = list(graph.neighbors(tail))
            if not neighbors:
                break
            tail = random.choice(neighbors)
            yield tail
            path_length += 1
class RestartingRandomWalker(Walker):
    """A random walker that restarts from the original vertex with a given probability."""

    @property
    def restart_probability(self) -> float:
        """Get the probability with which this walker will restart from the original vertex."""
        return self.parameters.restart_probability

    def get_igraph_walk(self, graph: igraph.Graph, vertex: igraph.Vertex) -> Walk:
        """Generate one random walk for one vertex, with the probability, alpha, of restarting."""
        tail = vertex
        yield tail['label']
        path_length = 1
        while path_length < self.parameters.max_path_length:
            # Restart with the configured probability. The previous comparison
            # (restart_probability <= random.random()) was inverted and
            # restarted with probability 1 - alpha instead of alpha.
            if random.random() < self.restart_probability:
                tail = vertex
            else:
                neighbors = tail.neighbors()
                if not neighbors:
                    # Dead end and no restart drawn -- terminate the walk.
                    break
                tail = random.choice(neighbors)
            yield tail['label']
            path_length += 1

    def get_networkx_walk(self, graph: networkx.Graph, vertex: str) -> Walk:
        """Generate one random walk for one vertex, with the probability, alpha, of restarting."""
        tail = vertex
        yield tail
        path_length = 1
        while path_length < self.parameters.max_path_length:
            if random.random() < self.restart_probability:
                tail = vertex
            else:
                # Graph.neighbors() returns an iterator (always truthy) --
                # materialize it to detect dead ends.
                neighbors = list(graph.neighbors(tail))
                if not neighbors:
                    break
                tail = random.choice(neighbors)
            yield tail
            path_length += 1
class BiasedRandomWalker(Walker):
    """A random walker that generates second-order random walks biased by edge weights."""

    # Keys used to look up per-vertex overrides and precomputed distributions
    # in the sampling strategy / vertex attributes.
    NUM_WALKS_KEY = 'num_walks'
    WALK_LENGTH_KEY = 'walk_length'
    PROBABILITIES_KEY = 'probabilities'
    FIRST_TRAVEL_KEY = 'first_travel_key'

    @property
    def sampling_strategy(self):
        """Get the sampling strategy for this walker."""
        return self.parameters.sampling_strategy

    def _check(self, vertex):
        """Return True when walk generation for *vertex* should be skipped per the sampling strategy."""
        return (
            vertex in self.sampling_strategy and
            self.NUM_WALKS_KEY in self.sampling_strategy[vertex] and
            self.sampling_strategy[vertex][self.NUM_WALKS_KEY] <= self.parameters.number_paths
        )

    def get_igraph_walk(self, graph: igraph.Graph, vertex: igraph.Vertex) -> Walk:
        """Generate second-order random walks biased by edge weights."""
        if self.parameters.max_path_length < 2:
            raise ValueError("The path length for random walk is less than 2, which doesn't make sense")
        if self._check(vertex):
            return
        # Start walk
        yield vertex
        double_tail = vertex
        # Calculate walk length (per-vertex override, else global maximum)
        if vertex in self.sampling_strategy:
            walk_length = self.sampling_strategy[vertex].get(self.WALK_LENGTH_KEY, self.parameters.max_path_length)
        else:
            walk_length = self.parameters.max_path_length
        # First hop uses the precomputed first-travel distribution.
        # NOTE(review): assumes vertex[FIRST_TRAVEL_KEY] is aligned with the
        # ordering of vertex.neighbors() -- confirm against the preprocessing.
        probabilities = vertex[self.FIRST_TRAVEL_KEY]
        tail = np.random.choice(vertex.neighbors(), p=probabilities)
        if not tail:
            return
        yield tail
        # Perform walk
        path_length = 2
        while path_length < walk_length:
            neighbors = tail.neighbors()
            # Skip dead end nodes
            if not neighbors:
                break
            # Second-order transition: distribution conditioned on the
            # previous vertex (double_tail).
            probabilities = tail[self.PROBABILITIES_KEY][double_tail['name']]
            double_tail, tail = tail, np.random.choice(neighbors, p=probabilities)
            yield tail
            path_length += 1

    def get_networkx_walk(self, graph: networkx.Graph, vertex: str) -> Walk:
        """Not implemented for networkx graphs."""
        raise NotImplementedError
signalsdb/api.py | eugene-eeo/signalsdb | 6 | 6619053 | <reponame>eugene-eeo/signalsdb
"""
signalsdb.api
~~~~~~~~~~~~~
Exports the public API.
"""
import re
from signalsdb.db import SIGNALS
__all__ = ('explain', 'search')
class NoSuchSignal(KeyError):
    """Raised when the requested signal code is not present in the database."""
def explain(code, signals=SIGNALS):
    """
    Describe the integer signal *code*: its name, default action,
    and description.

    :param code: An integer signal number.
    :param signals: A database of signals.
    :raises NoSuchSignal: if *code* is not present in *signals*.
    """
    if code not in signals:
        raise NoSuchSignal(code)
    name, action, description = signals[code]
    return {
        'id': code,
        'signal': name,
        'action': action,
        'description': description,
    }
def search(signal='', action='', signals=SIGNALS):
    """
    Find signals whose name matches *signal* and whose default action
    matches *action*. Matching is case-insensitive and anchored at the
    start of the string (``re.match`` semantics).

    :param signal: Regex for the signal name.
    :param action: Regex for the default action.
    :param signals: Database of signals.
    """
    name_pattern = re.compile(signal, re.IGNORECASE)
    action_pattern = re.compile(action, re.IGNORECASE)
    matches = []
    for code, (name, act, _description) in signals.items():
        if name_pattern.match(name) and action_pattern.match(act):
            matches.append(explain(code, signals=signals))
    return matches
| """
signalsdb.api
~~~~~~~~~~~~~
Exports the public API.
"""
import re
from signalsdb.db import SIGNALS
__all__ = ('explain', 'search')
class NoSuchSignal(KeyError):
    """Raised when the requested signal code is not present in the database."""
def explain(code, signals=SIGNALS):
    """
    Describe the integer signal *code*: its name, default action,
    and description.

    :param code: An integer signal number.
    :param signals: A database of signals.
    :raises NoSuchSignal: if *code* is not present in *signals*.
    """
    if code not in signals:
        raise NoSuchSignal(code)
    name, action, description = signals[code]
    return {
        'id': code,
        'signal': name,
        'action': action,
        'description': description,
    }
def search(signal='', action='', signals=SIGNALS):
    """
    Find signals whose name matches *signal* and whose default action
    matches *action*. Matching is case-insensitive and anchored at the
    start of the string (``re.match`` semantics).

    :param signal: Regex for the signal name.
    :param action: Regex for the default action.
    :param signals: Database of signals.
    """
    name_pattern = re.compile(signal, re.IGNORECASE)
    action_pattern = re.compile(action, re.IGNORECASE)
    matches = []
    for code, (name, act, _description) in signals.items():
        if name_pattern.match(name) and action_pattern.match(act):
            matches.append(explain(code, signals=signals))
    return matches
app/service/system.py | ST4NSB/music-recommendation-system | 0 | 6619054 | <filename>app/service/system.py
from app.deleteuser import DeleteUser
from app.repository.elasticsearch_context import ESContext
from app.repository.db_context import DBContext
from logging import Logger
from types import FunctionType
from typing import Any, Dict, List, Optional, Tuple
from flask import abort
import requests, random, re
import pandas as pd
from app.service.distance import Distance
from app.service.utils import Utils
from collections import defaultdict, namedtuple
from googlesearch import search
from bs4 import BeautifulSoup
from app.service.central_tendency import CentralTendencies
from elasticsearch import Elasticsearch
class RecommendationSystem:
    """Content-based music recommender backed by Elasticsearch, a user DB,
    and YouTube lookups for playable video ids."""

    def __init__(self, logger: Logger, db: DBContext, es: ESContext, cfg: Dict, rpath: str, yt_api_key: str):
        self.logger = logger
        self.db = db
        self.cfg = cfg
        self.rpath = rpath
        self.yt_api_key = yt_api_key
        self.es = es
        # Load (or build and cache) the weighted feature dataset, then index it.
        self.__songs_dataset = self.__get_processed_dataset()
        self.es.configure(self.__songs_dataset)
        self.logger.info(" * Loaded items in Elastic Search")
        self.logger.info(f" * Number of songs: {len(self.__songs_dataset)}")
        self.logger.info(
            f" * First 10 songs from dataset: { list(self.__songs_dataset.items())[0:10] }"
        )

    def __get_processed_dataset(self) -> Dict:
        """Return the curated song dataset, building it from the raw CSV
        (normalized + weighted features, duplicates and old songs dropped)
        and caching it as JSON on first use."""
        feature_json = Utils.read_json(filename=self.cfg['dataset']['curated'])
        if feature_json:
            return feature_json
        df = Utils.read_csv(self.cfg['dataset']['original'])
        norm_data = self.__get_minmax_values(df)
        self.logger.info(f"Min max values for normalization: {norm_data}")
        songs, existing_songs = {}, set()
        for _, row in df.iterrows():
            # Drop songs older than the configured cutoff year.
            if row['year'] < self.cfg['distance_algorithm']['threshold_year']:
                continue
            # Drop duplicate (artists, name) entries.
            if Utils.get_curated_name_dataset(row['artists'], row['name']) in existing_songs:
                continue
            existing_songs.add(Utils.get_curated_name_dataset(row['artists'], row['name']))
            songs[row['id']] = {
                'name': Utils.get_curated_name_dataset(row['artists'], row['name'], row['year']),
                'artists': Utils.split_artists(row['artists']),
                # Normalized, weight-scaled audio features (order matters for
                # the distance computation downstream).
                'feature_array': [
                    self.__compute_feature_value(row['acousticness'], norm_data['acousticness'], self.cfg['weights']['acousticness']),
                    self.__compute_feature_value(row['danceability'], norm_data['danceability'], self.cfg['weights']['danceability']),
                    self.__compute_feature_value(row['energy'], norm_data['energy'], self.cfg['weights']['energy']),
                    self.__compute_feature_value(row['instrumentalness'], norm_data['instrumentalness'], self.cfg['weights']['instrumentalness']),
                    self.__compute_feature_value(row['valence'], norm_data['valence'], self.cfg['weights']['valence']),
                    self.__compute_feature_value(row['tempo'], norm_data['tempo'], self.cfg['weights']['tempo']),
                    self.__compute_feature_value(row['liveness'], norm_data['liveness'], self.cfg['weights']['liveness']),
                    self.__compute_feature_value(row['loudness'], norm_data['loudness'], self.cfg['weights']['loudness']),
                    self.__compute_feature_value(row['speechiness'], norm_data['speechiness'], self.cfg['weights']['speechiness']),
                    self.__compute_feature_value(row['mode'], norm_data['mode'], self.cfg['weights']['mode']),
                    self.__compute_feature_value(row['popularity'], norm_data['popularity'], self.cfg['weights']['popularity']),
                    self.__compute_feature_value(row['year'], norm_data['year'], self.cfg['weights']['year']),
                ]
            }
        Utils.save_json(songs, self.rpath, filename=self.cfg['dataset']['curated'])
        return songs

    def __get_minmax_values(self, df: Any) -> Dict:
        """Return per-feature (min, max) tuples used for normalization.
        'year' uses the configured threshold year as its minimum."""
        Minmax = namedtuple('Minmax', ['min', 'max'])
        return {
            'acousticness': Minmax(min=min(df['acousticness']), max=max(df['acousticness'])),
            'danceability': Minmax(min=min(df['danceability']), max=max(df['danceability'])),
            'energy': Minmax(min=min(df['energy']), max=max(df['energy'])),
            'mode': Minmax(min=min(df['mode']), max=max(df['mode'])),
            'tempo': Minmax(min=min(df['tempo']), max=max(df['tempo'])),
            'instrumentalness': Minmax(min=min(df['instrumentalness']), max=max(df['instrumentalness'])),
            'speechiness': Minmax(min=min(df['speechiness']), max=max(df['speechiness'])),
            'loudness': Minmax(min=min(df['loudness']), max=max(df['loudness'])),
            'liveness': Minmax(min=min(df['liveness']), max=max(df['liveness'])),
            'valence': Minmax(min=min(df['valence']), max=max(df['valence'])),
            'popularity': Minmax(min=min(df['popularity']), max=max(df['popularity'])),
            'year': Minmax(min=self.cfg['distance_algorithm']['threshold_year'], max=max(df['year'])),
        }

    def __compute_feature_value(self, row, normalized_data_tuple, weight) -> float:
        """Min-max normalize a raw feature value, then scale it by its weight."""
        normalized_value = Utils.normalize(row, normalized_data_tuple.min, normalized_data_tuple.max)
        return normalized_value * weight

    def delete_user(self, user_id: str) -> None:
        """Remove the user's stored recommendation state from the DB."""
        # (annotation fixed: this method returns nothing, not str)
        self.db.delete_user(user_id)

    def get_random_songs(self, processed_songs: Dict) -> List[Dict]:
        """Return up to the configured number of random songs the user has not
        already liked or skipped, biased towards songs from 2015 onwards."""
        search_res = self.es.get_random_items()
        results = []
        for sr in search_res['hits']['hits']:
            if len(results) >= self.cfg['distance_algorithm']['query_songs_limit']:
                break
            if sr['_id'] in processed_songs['liked'] or sr['_id'] in processed_songs['skipped']:
                continue
            # Pre-2015 songs are kept only ~20% of the time.
            if Utils.get_year_from_name(sr['_source']['name']) < 2015:
                if random.uniform(0, 1) < 0.8:
                    continue
            results.append({
                "id": sr['_id'],
                "name": sr['_source']['name'],
                "youtubeId": self.__get_videoId(sr['_source']['name'])
            })
        return results

    def get_song_names(self, search_query: str) -> List[Dict]:
        """Return songs matching *search_query* from the search index.
        Aborts with 404 when nothing is found."""
        search_res = self.es.get_item_by_query(search_query, self.cfg['distance_algorithm']['query_songs_limit'])
        # NOTE(review): len() of the raw ES response dict is unlikely to be 0
        # even for empty results -- confirm whether hits should be checked.
        if len(search_res) == 0:
            abort(404, f"Couldn't find any songs for '{search_query}'")
        results = []
        for sr in search_res['hits']['hits']:
            results.append({
                "id": sr['_id'],
                "name": sr['_source']['name'],
                "youtubeId": self.__get_videoId(sr['_source']['name'])
            })
        return results

    def get_next_song(self, processed_songs: Dict) -> Dict:
        """Return the next recommended song for the user.

        Serves from the cached per-user ranking when the liked set is
        unchanged; otherwise recomputes feature distances over the whole
        dataset, persists the top results, and returns the best match.
        """
        song_threshold = self.cfg['distance_algorithm']['minimmum_songs']
        if len(processed_songs['liked']) < song_threshold:
            abort(400, f"There are not enough liked songs in the api request, min: {song_threshold} liked songs!")
        user_id = processed_songs['userId']
        # Cache hit: same number of liked songs as when distances were stored.
        if self.db.user_has_songs(user_id) and self.db.get_liked_nr_songs(user_id) == len(processed_songs['liked']):
            next_song = self.__get_song_from_db(processed_songs, user_id)
            if next_song:
                next_song['youtubeId'] = self.__get_videoId(next_song['name'])
                self.logger.info(f" * [GetNextSong]Next Song: {next_song}, type: {type(next_song)}")
                return next_song
            # Cached ranking exhausted -- fall through and recompute.
        # NOTE(review): eval() on config-supplied strings executes arbitrary
        # code; safe only if the config file is fully trusted.
        tmp_dist = self.__get_all_songs_distances(
            processed_songs,
            distmax=eval(self.cfg['distance_algorithm']['distmax']),
            distmin=eval(self.cfg['distance_algorithm']['distmin']),
            eval_func=eval(self.cfg['distance_algorithm']['eval_func'])
        )
        if len(tmp_dist) == 0:
            abort(500, "There are no more songs to recommend! Congrats, it's statistically impossible to get here!")
        sorted_distances = dict(sorted(tmp_dist.items(), key=lambda item: item[1]['distance_value'], reverse=True))
        # Keep only the top results_count candidates for persistence.
        distances = dict(list(sorted_distances.items())[:self.cfg['distance_algorithm']['results_count']])
        self.logger.info(
            f" * [GetNextSong]First {len(distances)} closest songs calculated by feature distance: { [val['name'] for val in distances.values()] }"
        )
        if self.db.user_has_songs(user_id):
            self.db.update_user_songs(user_id, distances, len(processed_songs['liked']))
            # (log messages were swapped between the update/insert branches)
            self.logger.info(" * [GetNextSong]Updated new distances in db")
        else:
            user_dist = {
                'user_id': user_id,
                'liked_songs': len(processed_songs['liked']),
                'songs': distances
            }
            self.db.insert_user_songs(user_dist)
            self.logger.info(" * [GetNextSong]Inserted new distances in db")
        # Best match is the first entry of the sorted ranking.
        top_id, top_entry = next(iter(sorted_distances.items()))
        result = {'id': top_id, 'name': top_entry['name']}
        result['youtubeId'] = self.__get_videoId(result['name'])
        self.logger.info(f" * [GetNextSong]Result: {result}, type: {type(result)}")
        return result

    def __get_song_from_db(self, processed_songs: Dict, user_id: str) -> Optional[Dict]:
        """Return the first cached recommendation the user has not already
        liked or skipped, or None when the cache is exhausted."""
        calculated_distances = self.db.get_user_songs(user_id)
        self.logger.info(f" * [GetNextSong]Distances from db: {calculated_distances}")
        for key in calculated_distances:
            if key in processed_songs['liked'] or key in processed_songs['skipped']:
                continue
            return {
                'id': key,
                'name': calculated_distances[key]['name']
            }
        return None

    def __get_all_songs_distances(self, processed_songs: Dict, distmax: FunctionType, distmin: FunctionType, eval_func: FunctionType) -> Dict:
        """Score every unprocessed song against the centroid of the user's
        liked songs using the configured distance functions."""
        distances, liked_songs = defaultdict(lambda: {'name': '', 'distance_value': 0}), []
        for song in processed_songs['liked']:
            liked_songs.append(self.__songs_dataset[song]['feature_array'])
        # Central-tendency vector of the liked songs' features.
        middle_feature = eval_func(
            Utils.convert_to_numpy_array(liked_songs)
        )
        self.logger.info(f" * [GetNextSong]Avg. feature values: {middle_feature}")
        for id, details in self.__songs_dataset.items():
            if id in processed_songs['skipped'] or id in processed_songs['liked']:
                continue
            dataset_feature = Utils.convert_to_numpy_array(details['feature_array'])
            feature_dist_sum = distmax(middle_feature, dataset_feature) - distmin(middle_feature, dataset_feature)
            distances[id] = {
                'name': details['name'],
                'distance_value': feature_dist_sum
            }
        return dict(distances)

    def __get_videoId(self, name: str) -> Optional[str]:
        """Resolve a YouTube video id for the song, trying the Data API first
        and falling back to scraping Google search results."""
        song_name = f"{name} Official video"
        youtube_id = self.__get_videoId_from_api(song_name)
        if not youtube_id:
            youtube_id = self.__get_videoId_from_google(song_name)
        return youtube_id

    def __get_videoId_from_api(self, song_name: str) -> Optional[str]:
        """Look up a video id via the YouTube Data API v3 search endpoint."""
        # Bug fix: the query string previously contained the mojibake
        # "maxResults=1(R)ionCode=US" ("&reg" had been entity-decoded to the
        # (R) sign). Passing params= also URL-encodes the free-text query.
        url = "https://youtube.googleapis.com/youtube/v3/search"
        params = {
            'maxResults': 1,
            'regionCode': 'US',
            'key': self.yt_api_key,
            'type': 'video',
            'q': song_name,
        }
        data = requests.get(url, params=params).json()
        if 'items' not in data or not data['items']:
            return None
        return data['items'][0]['id']['videoId']

    def __get_videoId_from_google(self, song_name: str) -> Optional[str]:
        """Best-effort fallback: scrape Google results for a YouTube watch URL
        and extract its video id. Returns None on any failure."""
        try:
            search_result_list = list(search(query=song_name, tld="com", num=20, stop=3, pause=1))
            video_url = None
            for i in range(len(search_result_list)):
                page = requests.get(search_result_list[i])
                url = str(BeautifulSoup(page.url, features="lxml"))
                if '<html><body><p>https://www.youtube.com/watch?v=' in url:
                    video_url = url
                    break
            if not video_url:
                return None
            video_id = video_url.replace('<html><body><p>https://www.youtube.com/watch?v=', '').replace('</p></body></html>', '')
            self.logger.info(f" * [GetNextSong]videoId: {video_id}, video url: {video_url}, type: {type(video_url)}")
            return video_id
        except Exception:
            # Deliberately best-effort: any scraping failure means "no id".
            # (narrowed from a bare except, which also swallowed
            # KeyboardInterrupt/SystemExit)
            return None
from app.deleteuser import DeleteUser
from app.repository.elasticsearch_context import ESContext
from app.repository.db_context import DBContext
from logging import Logger
from types import FunctionType
from typing import Any, Dict, List, Optional, Tuple
from flask import abort
import requests, random, re
import pandas as pd
from app.service.distance import Distance
from app.service.utils import Utils
from collections import defaultdict, namedtuple
from googlesearch import search
from bs4 import BeautifulSoup
from app.service.central_tendency import CentralTendencies
from elasticsearch import Elasticsearch
class RecommendationSystem:
def __init__(self, logger: Logger, db: DBContext, es: ESContext, cfg: Dict, rpath: str, yt_api_key: str):
self.logger = logger
self.db = db
self.cfg = cfg
self.rpath = rpath
self.yt_api_key = yt_api_key
self.es = es
self.__songs_dataset = self.__get_processed_dataset()
self.es.configure(self.__songs_dataset)
self.logger.info(f" * Loaded items in Elastic Search")
self.logger.info(f" * Number of songs: {len(self.__songs_dataset)}")
self.logger.info(
f" * First 10 songs from dataset: { list(self.__songs_dataset.items())[0:10] }"
)
def __get_processed_dataset(self) -> Dict:
feature_json = Utils.read_json(filename=self.cfg['dataset']['curated'])
if feature_json:
return feature_json
df = Utils.read_csv(self.cfg['dataset']['original'])
norm_data = self.__get_minmax_values(df)
self.logger.info(f"Min max values for normalization: {norm_data}")
songs, existing_songs = {}, set()
for _, row in df.iterrows():
if row['year'] < self.cfg['distance_algorithm']['threshold_year']:
continue
if Utils.get_curated_name_dataset(row['artists'], row['name']) in existing_songs:
continue
existing_songs.add(Utils.get_curated_name_dataset(row['artists'], row['name']))
songs[row['id']] = {
'name': Utils.get_curated_name_dataset(row['artists'], row['name'], row['year']),
'artists': Utils.split_artists(row['artists']),
'feature_array': [
self.__compute_feature_value(row['acousticness'], norm_data['acousticness'], self.cfg['weights']['acousticness']),
self.__compute_feature_value(row['danceability'], norm_data['danceability'], self.cfg['weights']['danceability']),
self.__compute_feature_value(row['energy'], norm_data['energy'], self.cfg['weights']['energy']),
self.__compute_feature_value(row['instrumentalness'], norm_data['instrumentalness'], self.cfg['weights']['instrumentalness']),
self.__compute_feature_value(row['valence'], norm_data['valence'], self.cfg['weights']['valence']),
self.__compute_feature_value(row['tempo'], norm_data['tempo'], self.cfg['weights']['tempo']),
self.__compute_feature_value(row['liveness'], norm_data['liveness'], self.cfg['weights']['liveness']),
self.__compute_feature_value(row['loudness'], norm_data['loudness'], self.cfg['weights']['loudness']),
self.__compute_feature_value(row['speechiness'], norm_data['speechiness'], self.cfg['weights']['speechiness']),
self.__compute_feature_value(row['mode'], norm_data['mode'], self.cfg['weights']['mode']),
self.__compute_feature_value(row['popularity'], norm_data['popularity'], self.cfg['weights']['popularity']),
self.__compute_feature_value(row['year'], norm_data['year'], self.cfg['weights']['year']),
]
}
Utils.save_json(songs, self.rpath, filename=self.cfg['dataset']['curated'])
return songs
def __get_minmax_values(self, df: Any) -> Dict:
Minmax = namedtuple('Minmax', ('min, max'))
return {
'acousticness': Minmax(min=min(df['acousticness']), max=max(df['acousticness'])),
'danceability': Minmax(min=min(df['danceability']), max=max(df['danceability'])),
'energy': Minmax(min=min(df['energy']), max=max(df['energy'])),
'mode': Minmax(min=min(df['mode']), max=max(df['mode'])),
'tempo': Minmax(min=min(df['tempo']), max=max(df['tempo'])),
'instrumentalness': Minmax(min=min(df['instrumentalness']), max=max(df['instrumentalness'])),
'speechiness': Minmax(min=min(df['speechiness']), max=max(df['speechiness'])),
'loudness': Minmax(min=min(df['loudness']), max=max(df['loudness'])),
'liveness': Minmax(min=min(df['liveness']), max=max(df['liveness'])),
'valence': Minmax(min=min(df['valence']), max=max(df['valence'])),
'popularity': Minmax(min=min(df['popularity']), max=max(df['popularity'])),
'year': Minmax(min=self.cfg['distance_algorithm']['threshold_year'], max=max(df['year'])),
}
def __compute_feature_value(self, row, normalized_data_tuple, weight) -> float:
    """Normalize a raw feature value into [0, 1] and scale it by its weight."""
    return Utils.normalize(row, normalized_data_tuple.min, normalized_data_tuple.max) * weight
def delete_user(self, user_id: str) -> None:
    """Remove all stored data for *user_id* from the database.

    Fix: the original annotation promised ``-> str`` but the method never
    returned anything; the return type is corrected to ``None``.
    """
    self.db.delete_user(user_id)
def get_random_songs(self, processed_songs: Dict) -> List[Dict]:
    """Pick up to the configured number of random, not-yet-processed songs.

    Songs whose name indicates a release year before 2015 are kept only
    ~20% of the time, skewing the sample towards recent tracks.
    """
    limit = self.cfg['distance_algorithm']['query_songs_limit']
    liked = processed_songs['liked']
    skipped = processed_songs['skipped']
    picks = []
    for hit in self.es.get_random_items()['hits']['hits']:
        if len(picks) >= limit:
            break
        song_id = hit['_id']
        if song_id in liked or song_id in skipped:
            continue
        name = hit['_source']['name']
        # Drop ~80% of pre-2015 songs (random draw happens only for them,
        # exactly like the original nested if).
        if Utils.get_year_from_name(name) < 2015 and random.uniform(0, 1) < 0.8:
            continue
        picks.append({
            "id": song_id,
            "name": name,
            "youtubeId": self.__get_videoId(name)
        })
    return picks
def get_song_names(self, search_query: str) -> List[Dict]:
    """Search the index for songs matching *search_query*.

    Returns a list of {id, name, youtubeId} dicts; aborts the request with
    404 when nothing was found.
    """
    search_res = self.es.get_item_by_query(search_query, self.cfg['distance_algorithm']['query_songs_limit'])
    # NOTE(review): this checks the size of the response dict itself, not the
    # number of hits. Presumably the helper returns {} on no match; if it
    # always returns a full ES envelope this 404 can never trigger -- verify
    # against es.get_item_by_query.
    if len(search_res) == 0:
        abort(404, f"Couldn't find any songs for '{search_query}'")
    results = []
    for sr in search_res['hits']['hits']:
        results.append({
            "id": sr['_id'],
            "name": sr['_source']['name'],
            # Each result is enriched with a YouTube video id for playback.
            "youtubeId": self.__get_videoId(sr['_source']['name'])
        })
    return results
def get_next_song(self, processed_songs: Dict) -> Dict:
    """Recommend the next song for a user.

    Serves from the per-user cache in the DB while it is still in sync with
    the client's liked list; otherwise recomputes distances over the whole
    dataset, persists the top results, and returns the closest song.
    """
    # NOTE(review): 'minimmum_songs' (sic) is the actual config key; fixing
    # the spelling requires a coordinated config-file change.
    song_threshold = self.cfg['distance_algorithm']['minimmum_songs']
    if len(processed_songs['liked']) < song_threshold:
        abort(400, f"There are not enough liked songs in the api request, min: {song_threshold} liked songs!")
    user_id = processed_songs['userId']
    # Cache hit: reuse stored distances only while the liked-song count is
    # unchanged (a new like invalidates the ranking).
    if self.db.user_has_songs(user_id) and self.db.get_liked_nr_songs(user_id) == len(processed_songs['liked']):
        next_song = self.__get_song_from_db(processed_songs, user_id)
        if next_song:
            next_song['youtubeId'] = self.__get_videoId(next_song['name'])
            self.logger.info(f" * [GetNextSong]Next Song: {next_song}, type: {type(next_song)}")
            return next_song
    # SECURITY: eval() executes arbitrary expressions read from the config
    # file. Acceptable only while the config is fully trusted; a lookup table
    # of allowed functions would be safer.
    tmp_dist = self.__get_all_songs_distances(
        processed_songs,
        distmax=eval(self.cfg['distance_algorithm']['distmax']),
        distmin=eval(self.cfg['distance_algorithm']['distmin']),
        eval_func=eval(self.cfg['distance_algorithm']['eval_func'])
    )
    if len(tmp_dist) == 0:
        abort(500, "There are no more songs to recommend! Congrats, it's statistically impossible to get here!")
    # Rank all candidates, highest distance_value first, and keep only the
    # configured number of results for the cache.
    sorted_distances = dict(sorted(tmp_dist.items(), key=lambda item: item[1]['distance_value'], reverse=True))
    distances = {A:N for (A,N) in [x for x in sorted_distances.items()][:self.cfg['distance_algorithm']['results_count']]}
    self.logger.info(
        f" * [GetNextSong]First {len(distances)} closest songs calculated by feature distance: { [val['name'] for val in distances.values()] }"
    )
    if self.db.user_has_songs(user_id):
        self.db.update_user_songs(user_id, distances, len(processed_songs['liked']))
        # NOTE(review): this message and the one in the else-branch look
        # swapped ("Inserted" on the update path, "Updated" on the insert
        # path); confirm before changing the log text.
        self.logger.info(" * [GetNextSong]Inserted new distances in db")
    else:
        user_dist = {
            'user_id': user_id,
            'liked_songs': len(processed_songs['liked']),
            'songs': distances
        }
        self.db.insert_user_songs(user_dist)
        self.logger.info(" * [GetNextSong]Updated new distances in db")
    # Take the single top-ranked entry (first item of the sorted dict).
    result = [{'id': ID, 'name': NAME['name']} for (ID, NAME) in [x for x in sorted_distances.items()][:1]][0]
    result['youtubeId'] = self.__get_videoId(result['name'])
    self.logger.info(f" * [GetNextSong]Result: {result}, type: {type(result)}")
    return result
def __get_song_from_db(self, processed_songs: Dict, user_id: str) -> Optional[Dict]:
    """Return the first cached recommendation the user has neither liked
    nor skipped yet, or None when the cache is exhausted."""
    calculated_distances = self.db.get_user_songs(user_id)
    self.logger.info(f" * [GetNextSong]Distances from db: {calculated_distances}")
    for song_id in calculated_distances:
        if song_id not in processed_songs['liked'] and song_id not in processed_songs['skipped']:
            return {
                'id': song_id,
                'name': calculated_distances[song_id]['name']
            }
    return None
def __get_all_songs_distances(self, processed_songs: Dict, distmax: FunctionType, distmin: FunctionType, eval_func: FunctionType) -> Dict:
    """Score every unprocessed song against the "middle" feature vector of
    the user's liked songs.

    :param distmax/distmin: distance functions; the score is their difference.
    :param eval_func: aggregator (e.g. a mean) collapsing the liked songs'
        feature matrix into one vector.
    :return: {song_id: {'name': ..., 'distance_value': ...}} for every song
        not already liked or skipped.
    """
    liked_features = [
        self.__songs_dataset[song]['feature_array']
        for song in processed_songs['liked']
    ]
    middle_feature = eval_func(
        Utils.convert_to_numpy_array(liked_features)
    )
    self.logger.info(f" * [GetNextSong]Avg. feature values: {middle_feature}")
    # Plain dict: every key is assigned before it is read, so the
    # defaultdict (and its default lambda) used previously was dead weight.
    # The loop variable is renamed so it no longer shadows the builtin id().
    distances = {}
    for song_id, details in self.__songs_dataset.items():
        if song_id in processed_songs['skipped'] or song_id in processed_songs['liked']:
            continue
        dataset_feature = Utils.convert_to_numpy_array(details['feature_array'])
        feature_dist_sum = distmax(middle_feature, dataset_feature) - distmin(middle_feature, dataset_feature)
        distances[song_id] = {
            'name': details['name'],
            'distance_value': feature_dist_sum
        }
    return distances
def __get_videoId(self, name: str) -> Optional[str]:
    """Resolve a YouTube video id for *name*: try the official API first,
    then fall back to the Google-search scrape."""
    query = f"{name} Official video"
    return self.__get_videoId_from_api(query) or self.__get_videoId_from_google(query)
def __get_videoId_from_api(self, song_name: str) -> Optional[str]:
    """Look up *song_name* via the YouTube Data API v3 search endpoint and
    return the first video id, or None when the response has no items.

    Bug fix: the query string contained the mojibake '®ionCode' -- the
    '&reg' of '&regionCode' had been collapsed into the (R) sign -- so the
    regionCode parameter (and everything after it) was silently broken.
    """
    url = (
        "https://youtube.googleapis.com/youtube/v3/search"
        f"?maxResults=1&regionCode=US&key={self.yt_api_key}&type=video&q={song_name}"
    )
    data = requests.get(url).json()
    if 'items' not in data or not data['items']:
        return None
    return data['items'][0]['id']['videoId']
def __get_videoId_from_google(self, song_name: str) -> Optional[str]:
try:
search_result_list = list(search(query=song_name, tld="com", num=20, stop=3, pause=1))
video_url = None
for i in range(len(search_result_list)):
page = requests.get(search_result_list[i])
url = str(BeautifulSoup(page.url, features="lxml"))
if '<html><body><p>https://www.youtube.com/watch?v=' in url:
video_url = url
break
if not video_url:
return None
video_id = video_url.replace('<html><body><p>https://www.youtube.com/watch?v=', '').replace('</p></body></html>', '')
self.logger.info(f" * [GetNextSong]videoId: {video_id}, video url: {video_url}, type: {type(video_url)}")
return video_id
except:
return None | en | 0.271721 | # !!!! lol | 2.222356 | 2 |
3rdparty/pytorch/caffe2/python/lengths_reducer_fused_8bit_rowwise_ops_test.py | WoodoLee/TorchCraft | 0 | 6619055 | from __future__ import absolute_import, division, print_function, unicode_literals
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, workspace
from hypothesis import given
class TestLengthsReducerOpsFused8BitRowwise(hu.HypothesisTestCase):
    """Compares the fused 8-bit rowwise SparseLengths* operators against the
    float reference operators fed with quantize->dequantize round-tripped
    data, so both sides see the same quantization error."""

    @given(
        batchsize=st.integers(1, 20),
        blocksize=st.sampled_from([8, 16, 32, 64, 85, 96, 128, 163]),
        weighted=st.booleans(),
        seed=st.integers(0, 2 ** 32 - 1),
        empty_indices=st.booleans(),
    )
    def test_sparse_lengths_sum(
        self, batchsize, blocksize, weighted, seed, empty_indices
    ):
        """SparseLengths(Weighted)SumFused8BitRowwise vs. float reference."""
        net = core.Net("bench")

        np.random.seed(seed)

        input_data = np.random.rand(batchsize, blocksize).astype(np.float32)
        if empty_indices:
            indices = np.empty(0, dtype=np.int32)
        else:
            indices = np.random.randint(
                low=0,
                high=len(input_data),
                size=[np.random.randint(len(input_data))],
                dtype=np.int32,
            )
        weights = np.random.uniform(size=[len(indices)]).astype(np.float32)
        # Segment length clamped to [1, 10]. Equivalent to the original
        # np.clip(1, len(indices) // 2, 10) -- min(10, max(1, x)) is symmetric
        # in its first two operands -- but written in the documented
        # np.clip(a, a_min, a_max) argument order.
        lengths_split = np.clip(len(indices) // 2, 1, 10)
        # Only len(indices) // lengths_split full segments are generated;
        # trailing indices that do not fill a segment are ignored.
        lengths = (
            np.ones([len(indices) // lengths_split], dtype=np.int32) * lengths_split
        )

        quantized_data = net.FloatToFused8BitRowwiseQuantized(
            "input_data", "quantized_data"
        )
        dequantized_data = net.Fused8BitRowwiseQuantizedToFloat(
            quantized_data, "dequantized_data"
        )
        if weighted:
            net.SparseLengthsWeightedSum(
                [dequantized_data, "weights", "indices", "lengths"],
                "sum_reference",
            )
            net.SparseLengthsWeightedSumFused8BitRowwise(
                [quantized_data, "weights", "indices", "lengths"], "sum_quantized"
            )
        else:
            net.SparseLengthsSum(
                [dequantized_data, "indices", "lengths"], "sum_reference",
            )
            net.SparseLengthsSumFused8BitRowwise(
                [quantized_data, "indices", "lengths"], "sum_quantized"
            )

        workspace.FeedBlob("input_data", input_data)
        workspace.FeedBlob("weights", weights)
        workspace.FeedBlob("indices", indices)
        workspace.FeedBlob("lengths", lengths)

        workspace.GlobalInit(["caffe2", "--caffe2_log_level=0"])
        workspace.CreateNet(net)
        workspace.RunNetOnce(net)
        sum_reference = workspace.FetchBlob("sum_reference")
        sum_quantized = workspace.FetchBlob("sum_quantized")
        np.testing.assert_array_almost_equal(sum_reference, sum_quantized)

    @given(
        batchsize=st.integers(1, 20),
        blocksize=st.sampled_from([8, 16, 32, 64, 85, 96, 128, 163]),
        seed=st.integers(0, 2 ** 32 - 1),
        empty_indices=st.booleans(),
    )
    def test_sparse_lengths_mean(self, batchsize, blocksize, seed, empty_indices):
        """SparseLengthsMeanFused8BitRowwise vs. float reference."""
        net = core.Net("bench")

        np.random.seed(seed)

        input_data = np.random.rand(batchsize, blocksize).astype(np.float32)
        if empty_indices:
            indices = np.empty(0, dtype=np.int32)
            # Mean needs explicit zero-length segments so the output keeps
            # its batch dimension.
            lengths = np.zeros(batchsize, dtype=np.int32)
        else:
            indices = np.random.randint(
                low=0,
                high=len(input_data),
                size=[np.random.randint(len(input_data))],
                dtype=np.int32,
            )
            # See test_sparse_lengths_sum: clamp the segment length to [1, 10].
            lengths_split = np.clip(len(indices) // 2, 1, 10)
            lengths = (
                np.ones([len(indices) // lengths_split], dtype=np.int32) * lengths_split
            )
        # (Removed a leftover debug print of indices/lengths here.)

        quantized_data = net.FloatToFused8BitRowwiseQuantized(
            "input_data", "quantized_data"
        )
        dequantized_data = net.Fused8BitRowwiseQuantizedToFloat(
            quantized_data, "dequantized_data"
        )
        net.SparseLengthsMean(
            [dequantized_data, "indices", "lengths"], "mean_reference"
        )
        net.SparseLengthsMeanFused8BitRowwise(
            [quantized_data, "indices", "lengths"], "mean_quantized"
        )

        workspace.FeedBlob("input_data", input_data)
        workspace.FeedBlob("indices", indices)
        workspace.FeedBlob("lengths", lengths)

        workspace.GlobalInit(["caffe2", "--caffe2_log_level=0"])
        workspace.CreateNet(net)
        workspace.RunNetOnce(net)
        mean_reference = workspace.FetchBlob("mean_reference")
        mean_quantized = workspace.FetchBlob("mean_quantized")
        np.testing.assert_array_almost_equal(mean_reference, mean_quantized)
| from __future__ import absolute_import, division, print_function, unicode_literals
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core, workspace
from hypothesis import given
class TestLengthsReducerOpsFused8BitRowwise(hu.HypothesisTestCase):
    """Compares the fused 8-bit rowwise SparseLengths* operators against the
    float reference operators fed with quantize->dequantize round-tripped
    data, so both sides see the same quantization error."""

    @given(
        batchsize=st.integers(1, 20),
        blocksize=st.sampled_from([8, 16, 32, 64, 85, 96, 128, 163]),
        weighted=st.booleans(),
        seed=st.integers(0, 2 ** 32 - 1),
        empty_indices=st.booleans(),
    )
    def test_sparse_lengths_sum(
        self, batchsize, blocksize, weighted, seed, empty_indices
    ):
        """SparseLengths(Weighted)SumFused8BitRowwise vs. float reference."""
        net = core.Net("bench")

        np.random.seed(seed)

        input_data = np.random.rand(batchsize, blocksize).astype(np.float32)
        if empty_indices:
            indices = np.empty(0, dtype=np.int32)
        else:
            indices = np.random.randint(
                low=0,
                high=len(input_data),
                size=[np.random.randint(len(input_data))],
                dtype=np.int32,
            )
        weights = np.random.uniform(size=[len(indices)]).astype(np.float32)
        # Segment length clamped to [1, 10]. Equivalent to the original
        # np.clip(1, len(indices) // 2, 10) -- min(10, max(1, x)) is symmetric
        # in its first two operands -- but written in the documented
        # np.clip(a, a_min, a_max) argument order.
        lengths_split = np.clip(len(indices) // 2, 1, 10)
        # Only len(indices) // lengths_split full segments are generated;
        # trailing indices that do not fill a segment are ignored.
        lengths = (
            np.ones([len(indices) // lengths_split], dtype=np.int32) * lengths_split
        )

        quantized_data = net.FloatToFused8BitRowwiseQuantized(
            "input_data", "quantized_data"
        )
        dequantized_data = net.Fused8BitRowwiseQuantizedToFloat(
            quantized_data, "dequantized_data"
        )
        if weighted:
            net.SparseLengthsWeightedSum(
                [dequantized_data, "weights", "indices", "lengths"],
                "sum_reference",
            )
            net.SparseLengthsWeightedSumFused8BitRowwise(
                [quantized_data, "weights", "indices", "lengths"], "sum_quantized"
            )
        else:
            net.SparseLengthsSum(
                [dequantized_data, "indices", "lengths"], "sum_reference",
            )
            net.SparseLengthsSumFused8BitRowwise(
                [quantized_data, "indices", "lengths"], "sum_quantized"
            )

        workspace.FeedBlob("input_data", input_data)
        workspace.FeedBlob("weights", weights)
        workspace.FeedBlob("indices", indices)
        workspace.FeedBlob("lengths", lengths)

        workspace.GlobalInit(["caffe2", "--caffe2_log_level=0"])
        workspace.CreateNet(net)
        workspace.RunNetOnce(net)
        sum_reference = workspace.FetchBlob("sum_reference")
        sum_quantized = workspace.FetchBlob("sum_quantized")
        np.testing.assert_array_almost_equal(sum_reference, sum_quantized)

    @given(
        batchsize=st.integers(1, 20),
        blocksize=st.sampled_from([8, 16, 32, 64, 85, 96, 128, 163]),
        seed=st.integers(0, 2 ** 32 - 1),
        empty_indices=st.booleans(),
    )
    def test_sparse_lengths_mean(self, batchsize, blocksize, seed, empty_indices):
        """SparseLengthsMeanFused8BitRowwise vs. float reference."""
        net = core.Net("bench")

        np.random.seed(seed)

        input_data = np.random.rand(batchsize, blocksize).astype(np.float32)
        if empty_indices:
            indices = np.empty(0, dtype=np.int32)
            # Mean needs explicit zero-length segments so the output keeps
            # its batch dimension.
            lengths = np.zeros(batchsize, dtype=np.int32)
        else:
            indices = np.random.randint(
                low=0,
                high=len(input_data),
                size=[np.random.randint(len(input_data))],
                dtype=np.int32,
            )
            # See test_sparse_lengths_sum: clamp the segment length to [1, 10].
            lengths_split = np.clip(len(indices) // 2, 1, 10)
            lengths = (
                np.ones([len(indices) // lengths_split], dtype=np.int32) * lengths_split
            )
        # (Removed a leftover debug print of indices/lengths here.)

        quantized_data = net.FloatToFused8BitRowwiseQuantized(
            "input_data", "quantized_data"
        )
        dequantized_data = net.Fused8BitRowwiseQuantizedToFloat(
            quantized_data, "dequantized_data"
        )
        net.SparseLengthsMean(
            [dequantized_data, "indices", "lengths"], "mean_reference"
        )
        net.SparseLengthsMeanFused8BitRowwise(
            [quantized_data, "indices", "lengths"], "mean_quantized"
        )

        workspace.FeedBlob("input_data", input_data)
        workspace.FeedBlob("indices", indices)
        workspace.FeedBlob("lengths", lengths)

        workspace.GlobalInit(["caffe2", "--caffe2_log_level=0"])
        workspace.CreateNet(net)
        workspace.RunNetOnce(net)
        mean_reference = workspace.FetchBlob("mean_reference")
        mean_quantized = workspace.FetchBlob("mean_quantized")
        np.testing.assert_array_almost_equal(mean_reference, mean_quantized)
| none | 1 | 2.299531 | 2 | |
src/kgmk/io/reading/np_reader.py | kagemeka/python | 0 | 6619056 | import numpy as np
from . import (
Reader,
)
class NPReader(Reader):
def line_ints(
    self,
) -> np.ndarray:
    """Read one line of whitespace-separated integers as an int64 array.

    Fix: the annotation said ``-> np.array``, which is a function, not a
    type; the value returned here is an ``np.ndarray``.
    """
    # NOTE: text-mode np.fromstring is deprecated in modern NumPy;
    # np.array(self.str_().split(), dtype=np.int64) is the drop-in
    # replacement when upgrading.
    return np.fromstring(
        string=self.str_(),
        dtype=np.int64,
        sep=' ',
    )
def read_ints(
self,
) -> np.array:
return np.fromstring(
string=(
self().decode()
),
dtype=np.int64,
sep=' ',
) | import numpy as np
from . import (
Reader,
)
class NPReader(Reader):
def line_ints(
    self,
) -> np.ndarray:
    """Read one line of whitespace-separated integers as an int64 array.

    Fix: the annotation said ``-> np.array``, which is a function, not a
    type; the value returned here is an ``np.ndarray``.
    """
    # NOTE: text-mode np.fromstring is deprecated in modern NumPy;
    # np.array(self.str_().split(), dtype=np.int64) is the drop-in
    # replacement when upgrading.
    return np.fromstring(
        string=self.str_(),
        dtype=np.int64,
        sep=' ',
    )
def read_ints(
self,
) -> np.array:
return np.fromstring(
string=(
self().decode()
),
dtype=np.int64,
sep=' ',
) | none | 1 | 2.874922 | 3 | |
src/tandlr/security_configuration/serializers.py | shrmoud/schoolapp | 0 | 6619057 | <filename>src/tandlr/security_configuration/serializers.py
# -*- coding: utf-8 -*-
from rest_framework import serializers
from tandlr.security_configuration.models import (
Permission,
PermissionRole,
Role,
RoleUser
)
from tandlr.users.serializers import UserShortDetailSerializer
class RoleSerializer(serializers.ModelSerializer):
    """Flat representation of a Role."""

    class Meta:
        model = Role
        fields = ['id', 'name', 'description', 'status']
class RoleUserSerializer(serializers.ModelSerializer):
    """Role-user assignment with plain (primary-key) related fields."""

    class Meta:
        model = RoleUser
        fields = ['id', 'role', 'user']
class RoleUserDetailSerializer(serializers.ModelSerializer):
    """Role-user assignment with both related objects expanded inline."""

    role = RoleSerializer()
    user = UserShortDetailSerializer()

    class Meta:
        model = RoleUser
        fields = ['id', 'role', 'user']
class PermissionSerializer(serializers.ModelSerializer):
    """Flat representation of a Permission."""

    class Meta:
        model = Permission
        fields = ['id', 'name', 'description', 'status']
class PermissionRoleSerializer(serializers.ModelSerializer):
    """Permission-role link with plain (primary-key) related fields."""

    class Meta:
        model = PermissionRole
        fields = ['id', 'permission', 'role']
class PermissionRoleDetailSerializer(serializers.ModelSerializer):
    """Permission-role link with both related objects expanded inline."""

    permission = PermissionSerializer()
    role = RoleSerializer()

    class Meta:
        model = PermissionRole
        fields = ['id', 'permission', 'role']
| <filename>src/tandlr/security_configuration/serializers.py
# -*- coding: utf-8 -*-
from rest_framework import serializers
from tandlr.security_configuration.models import (
Permission,
PermissionRole,
Role,
RoleUser
)
from tandlr.users.serializers import UserShortDetailSerializer
class RoleSerializer(serializers.ModelSerializer):
    """Flat representation of a Role."""

    class Meta:
        model = Role
        fields = ['id', 'name', 'description', 'status']
class RoleUserSerializer(serializers.ModelSerializer):
    """Role-user assignment with plain (primary-key) related fields."""

    class Meta:
        model = RoleUser
        fields = ['id', 'role', 'user']
class RoleUserDetailSerializer(serializers.ModelSerializer):
    """Role-user assignment with both related objects expanded inline."""

    role = RoleSerializer()
    user = UserShortDetailSerializer()

    class Meta:
        model = RoleUser
        fields = ['id', 'role', 'user']
class PermissionSerializer(serializers.ModelSerializer):
    """Flat representation of a Permission."""

    class Meta:
        model = Permission
        fields = ['id', 'name', 'description', 'status']
class PermissionRoleSerializer(serializers.ModelSerializer):
    """Permission-role link with plain (primary-key) related fields."""

    class Meta:
        model = PermissionRole
        fields = ['id', 'permission', 'role']
class PermissionRoleDetailSerializer(serializers.ModelSerializer):
    """Permission-role link with both related objects expanded inline."""

    permission = PermissionSerializer()
    role = RoleSerializer()

    class Meta:
        model = PermissionRole
        fields = ['id', 'permission', 'role']
| en | 0.769321 | # -*- coding: utf-8 -*- | 2.283761 | 2 |
setup.py | henriquegemignani/Super-Duper-Metroid | 4 | 6619058 | from setuptools import setup
from Cython.Build import cythonize

# Compile the BPS patcher extension module from its Cython source;
# annotate=True additionally writes an HTML report of the generated C code.
setup(ext_modules=cythonize("src/SuperDuperMetroid/BPSPatch/BPS_Patcher.pyx", annotate=True))
| from setuptools import setup
from Cython.Build import cythonize

# Compile the BPS patcher extension module from its Cython source;
# annotate=True additionally writes an HTML report of the generated C code.
setup(ext_modules=cythonize("src/SuperDuperMetroid/BPSPatch/BPS_Patcher.pyx", annotate=True))
| none | 1 | 1.130307 | 1 | |
tests/expected/exceptions.py | div72/py2many | 345 | 6619059 | from typing import Callable, Dict, List, Set, Optional
from ctypes import c_int8 as i8, c_int16 as i16, c_int32 as i32, c_int64 as i64
from ctypes import c_uint8 as u8, c_uint16 as u16, c_uint32 as u32, c_uint64 as u64
import sys
def show():
    """Exercise three try/except shapes: a typed handler plus ``finally``,
    a caught ZeroDivisionError, and a bare ``except``.

    NOTE(review): this file lives under tests/expected/ in a py2many repo --
    it looks like a transpiler expected-output fixture, so its exact shape
    (including the bare except) is probably intentional; confirm before
    "cleaning it up".
    """
    try:
        raise Exception("foo")
    except Exception as e:
        print("caught")
    finally:
        print("Finally")
    try:
        3 / 0
    except ZeroDivisionError:
        print("OK")
    try:
        raise Exception("foo")
    except:
        print("Got it")


if __name__ == "__main__":
    show()
| from typing import Callable, Dict, List, Set, Optional
from ctypes import c_int8 as i8, c_int16 as i16, c_int32 as i32, c_int64 as i64
from ctypes import c_uint8 as u8, c_uint16 as u16, c_uint32 as u32, c_uint64 as u64
import sys
def show():
    """Exercise three try/except shapes: a typed handler plus ``finally``,
    a caught ZeroDivisionError, and a bare ``except``.

    NOTE(review): this file lives under tests/expected/ in a py2many repo --
    it looks like a transpiler expected-output fixture, so its exact shape
    (including the bare except) is probably intentional; confirm before
    "cleaning it up".
    """
    try:
        raise Exception("foo")
    except Exception as e:
        print("caught")
    finally:
        print("Finally")
    try:
        3 / 0
    except ZeroDivisionError:
        print("OK")
    try:
        raise Exception("foo")
    except:
        print("Got it")


if __name__ == "__main__":
    show()
| none | 1 | 2.749122 | 3 | |
scripts/evaluate.py | erickrf/infernal | 2 | 6619060 | <filename>scripts/evaluate.py<gh_stars>1-10
# -*- coding: utf-8 -*-
"""
Evaluate a shallow classifier
"""
from __future__ import division, print_function, unicode_literals
import argparse
import numpy as np
from sklearn.metrics import f1_score
from infernal import shallow_utils as shallow
def evaluate(classifier, normalizer, transformer, x, y):
"""
Evaluate the performance of the classifier with the given data
"""
if normalizer is not None:
x = normalizer.transform(x)
if transformer is not None:
x = transformer.transform(x)
preds = classifier.predict(x)
acc = np.sum(y == preds) / len(y)
f1 = f1_score(y, preds, average='macro')
print('Accuracy: {:.2%}'.format(acc))
print('F1 macro: {:.3}'.format(f1))
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description=__doc__)
    # Fixed: the two adjacent string literals concatenated to
    # "...evaluate theclassifier on"; the trailing space restores the
    # intended wording.
    parser.add_argument('data', help='Preprocessed data (npz) to evaluate the '
                                     'classifier on')
    parser.add_argument('model', help='Directory with saved model')
    args = parser.parse_args()

    x, y = shallow.load_data(args.data)
    classifier = shallow.load_classifier(args.model)
    normalizer = shallow.load_normalizer(args.model)
    transformer = shallow.load_transformer(args.model)
    evaluate(classifier, normalizer, transformer, x, y)
| <filename>scripts/evaluate.py<gh_stars>1-10
# -*- coding: utf-8 -*-
"""
Evaluate a shallow classifier
"""
from __future__ import division, print_function, unicode_literals
import argparse
import numpy as np
from sklearn.metrics import f1_score
from infernal import shallow_utils as shallow
def evaluate(classifier, normalizer, transformer, x, y):
"""
Evaluate the performance of the classifier with the given data
"""
if normalizer is not None:
x = normalizer.transform(x)
if transformer is not None:
x = transformer.transform(x)
preds = classifier.predict(x)
acc = np.sum(y == preds) / len(y)
f1 = f1_score(y, preds, average='macro')
print('Accuracy: {:.2%}'.format(acc))
print('F1 macro: {:.3}'.format(f1))
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description=__doc__)
    # Fixed: the two adjacent string literals concatenated to
    # "...evaluate theclassifier on"; the trailing space restores the
    # intended wording.
    parser.add_argument('data', help='Preprocessed data (npz) to evaluate the '
                                     'classifier on')
    parser.add_argument('model', help='Directory with saved model')
    args = parser.parse_args()

    x, y = shallow.load_data(args.data)
    classifier = shallow.load_classifier(args.model)
    normalizer = shallow.load_normalizer(args.model)
    transformer = shallow.load_transformer(args.model)
    evaluate(classifier, normalizer, transformer, x, y)
| en | 0.716261 | # -*- coding: utf-8 -*- Evaluate a shallow classifier Evaluate the performance of the classifier with the given data | 2.74303 | 3 |
tests/validators/decimal_validator_test.py | binary-butterfly/validataclass | 0 | 6619061 | <filename>tests/validators/decimal_validator_test.py
"""
validataclass
Copyright (c) 2021, binary butterfly GmbH and contributors
Use of this source code is governed by an MIT-style license that can be found in the LICENSE file.
"""
from decimal import Decimal
import pytest
from tests.test_utils import unpack_params
from validataclass.exceptions import RequiredValueError, InvalidTypeError, InvalidDecimalError, NumberRangeError, \
DecimalPlacesError, InvalidValidatorOptionException
from validataclass.validators import DecimalValidator
class DecimalValidatorTest:
    """Unit tests for DecimalValidator.

    Covers: basic parsing of valid/invalid decimal strings, min/max value
    range enforcement, min/max decimal-places enforcement, the fixed
    ``output_places`` formatting, and rejection of inconsistent validator
    options at construction time.
    """

    @staticmethod
    @pytest.mark.parametrize(
        'input_data, expected_decimal_str', [
            ('0', '0'),
            ('1.234', '1.234'),
            ('-0.001', '-0.001'),
            ('+42', '42'),
            ('-.1', '-0.1'),
            ('-1.', '-1'),
            ('-123456789.123456789', '-123456789.123456789'),
        ]
    )
    def test_valid_decimal(input_data, expected_decimal_str):
        """ Test DecimalValidator with valid input strings. """
        validator = DecimalValidator()
        decimal = validator.validate(input_data)
        assert type(decimal) is Decimal
        assert str(decimal) == expected_decimal_str

    @staticmethod
    def test_invalid_none():
        """ Check that DecimalValidator raises exceptions for None as value. """
        validator = DecimalValidator()
        with pytest.raises(RequiredValueError) as exception_info:
            validator.validate(None)
        assert exception_info.value.to_dict() == {'code': 'required_value'}

    @staticmethod
    @pytest.mark.parametrize('input_data', [1234, 1.234, True])
    def test_invalid_wrong_type(input_data):
        """ Check that DecimalValidator raises exceptions for values that are not of type 'str'. """
        validator = DecimalValidator()
        with pytest.raises(InvalidTypeError) as exception_info:
            validator.validate(input_data)
        assert exception_info.value.to_dict() == {
            'code': 'invalid_type',
            'expected_type': 'str',
        }

    @staticmethod
    @pytest.mark.parametrize('input_data', ['', 'bananana', '1234x', '$123', '1,234', 'Infinity', 'NaN', '.', '1e3'])
    def test_invalid_malformed_string(input_data):
        """ Test DecimalValidator with malformed strings. """
        validator = DecimalValidator()
        with pytest.raises(InvalidDecimalError) as exception_info:
            validator.validate(input_data)
        assert exception_info.value.to_dict() == {'code': 'invalid_decimal'}

    # Test value range requirement check

    @staticmethod
    @pytest.mark.parametrize(
        'min_value, max_value, input_data', [
            # min_value only (as Decimal object and as string)
            *unpack_params(
                Decimal('1'), None,
                ['1', '1.000', '1.00001', '+1.1', '42'],
            ),
            *unpack_params(
                '-3.000', None,
                ['-3', '-2.9999', '-2', '0.000', '1.234', '4.567'],
            ),
            # max_value only
            *unpack_params(
                None, Decimal('-10.5'),
                ['-10.5', '-10.6', '-11', '-9999.999'],
            ),
            *unpack_params(
                None, '-0',
                ['-1.234', '-0.001', '0', '-0', '+0'],
            ),
            # min_value and max_value
            *unpack_params(
                Decimal('0'), Decimal('10'),
                ['0.000', '0.001', '1', '9.9999', '+10.00000'],
            ),
            *unpack_params(
                '-1', '1',
                ['-1.0000', '-0.99999', '-0.0001', '0', '0.001', '0.9999', '1.000'],
            ),
            *unpack_params(
                Decimal('42'), '42.0',
                ['42', '+42', '42.0000000'],
            ),
        ]
    )
    def test_decimal_value_range_valid(min_value, max_value, input_data):
        """ Test DecimalValidator with range requirements with valid decimal strings. """
        validator = DecimalValidator(min_value=min_value, max_value=max_value)
        decimal = validator.validate(input_data)
        assert type(decimal) is Decimal
        # Leading '+' and trailing '.' are not part of Decimal's str() form.
        assert str(decimal) == input_data.lstrip('+').rstrip('.')

    @staticmethod
    @pytest.mark.parametrize(
        'min_value, max_value, input_data', [
            # min_value only (as Decimal object and as string)
            *unpack_params(
                Decimal('1'), None,
                ['0.9999', '0', '-1.00001', '-42'],
            ),
            *unpack_params(
                '-3.000', None,
                ['-3.000000001', '-3.1', '-42'],
            ),
            # max_value only
            *unpack_params(
                None, Decimal('-10.5'),
                ['-10.499', '-10.000', '-10', '0', '0.001', '42'],
            ),
            *unpack_params(
                None, '-0',
                ['0.001', '1', '123.456'],
            ),
            # min_value and max_value
            *unpack_params(
                Decimal('0'), Decimal('10'),
                ['-0.001', '-1', '10.0000001', '+42'],
            ),
            *unpack_params(
                '-1', '1',
                ['-1.0001', '-9.999', '1.000001', '1.234', '+42'],
            ),
            *unpack_params(
                Decimal('42'), '42.0',
                ['0', '41.999999', '42.00000001', '-42'],
            ),
        ]
    )
    def test_decimal_value_range_invalid(min_value, max_value, input_data):
        """ Test DecimalValidator with range requirements with decimal strings outside the range. """
        validator = DecimalValidator(min_value=min_value, max_value=max_value)

        # Construct error dict with min_value and/or max_value, depending on which is specified
        expected_error_dict = {'code': 'number_range_error'}
        expected_error_dict.update({'min_value': str(min_value)} if min_value is not None else {})
        expected_error_dict.update({'max_value': str(max_value)} if max_value is not None else {})

        with pytest.raises(NumberRangeError) as exception_info:
            validator.validate(input_data)
        assert exception_info.value.to_dict() == expected_error_dict

    # Test minimum/maximum decimal places requirements

    @staticmethod
    @pytest.mark.parametrize(
        'min_places, max_places, input_data', [
            # min_places only
            *unpack_params(
                0, None,
                ['0', '0.001', '0.10', '-123.456789', '42'],
            ),
            *unpack_params(
                1, None,
                ['0.0', '0.001', '0.10', '-123.456789', '42.0'],
            ),
            *unpack_params(
                3, None,
                ['0.000', '0.001', '0.100', '-123.456789', '42.000'],
            ),
            # max_places only
            *unpack_params(
                None, 0,
                ['0', '-123', '42.'],
            ),
            *unpack_params(
                None, 1,
                ['0', '0.0', '0.1', '-123.4', '42'],
            ),
            *unpack_params(
                None, 3,
                ['0', '0.10', '0.01', '123.4', '-123.456', '42.'],
            ),
            # min_places and max_places
            *unpack_params(
                0, 2,
                ['0', '0.0', '0.01', '0.1', '-123', '-123.45', '42.'],
            ),
            *unpack_params(
                2, 3,
                ['0.00', '0.000', '0.10', '0.001', '-123.45', '-123.456'],
            ),
            *unpack_params(
                2, 2,
                ['0.00', '0.01', '0.10', '-123.45', '42.00'],
            ),
        ]
    )
    def test_min_max_places_valid(min_places, max_places, input_data):
        """ Test DecimalValidator with a minimum and/or maximum number of decimal places with valid decimal strings. """
        validator = DecimalValidator(min_places=min_places, max_places=max_places)
        decimal = validator.validate(input_data)
        assert type(decimal) is Decimal
        # A trailing '.' is not part of Decimal's str() form.
        assert str(decimal) == input_data.rstrip('.')

    @staticmethod
    @pytest.mark.parametrize(
        'min_places, max_places, input_data', [
            # min_places only
            *unpack_params(
                1, None,
                ['0', '0.', '-123', '42.'],
            ),
            *unpack_params(
                3, None,
                ['0', '0.01', '-123.45'],
            ),
            # max_places only
            *unpack_params(
                None, 0,
                ['0.0', '-123.4', '42.0', '0.1'],
            ),
            *unpack_params(
                None, 1,
                ['0.00', '0.01', '-123.45'],
            ),
            *unpack_params(
                None, 3,
                ['0.0000', '0.1000', '-123.4567'],
            ),
            # min_places and max_places
            *unpack_params(
                2, 3,
                ['0.0', '0.0000', '0.1', '0.0001', '-123.4', '-123.4567', '42'],
            ),
            *unpack_params(
                2, 2,
                ['0.0', '0.000', '0.001', '0.1', '-123.4', '-123.456', '42'],
            ),
        ]
    )
    def test_min_max_places_invalid(min_places, max_places, input_data):
        """ Test DecimalValidator with a minimum and/or maximum number of decimal places with invalid input. """
        validator = DecimalValidator(min_places=min_places, max_places=max_places)

        # Construct error dict with min_places and/or max_places, depending on which is specified
        expected_error_dict = {'code': 'decimal_places'}
        expected_error_dict.update({'min_places': min_places} if min_places is not None else {})
        expected_error_dict.update({'max_places': max_places} if max_places is not None else {})

        with pytest.raises(DecimalPlacesError) as exception_info:
            validator.validate(input_data)
        assert exception_info.value.to_dict() == expected_error_dict

    # Test output_places parameter

    @staticmethod
    @pytest.mark.parametrize(
        'output_places, input_data, expected_output', [
            # output_places=0
            (0, '0', '0'),
            (0, '-42', '-42'),
            (0, '42.0', '42'),
            (0, '123.456', '123'),  # rounded down
            (0, '123.567', '124'),  # rounded up
            # output_places=1
            (1, '0', '0.0'),
            (1, '-42', '-42.0'),
            (1, '42.000', '42.0'),
            (1, '123.456', '123.5'),
            (1, '999.999', '1000.0'),
            # output_places=3
            (3, '0', '0.000'),
            (3, '-42', '-42.000'),
            (3, '42.000000', '42.000'),
            (3, '123.456', '123.456'),
            (3, '123.456789', '123.457'),
            # output_places=9
            (9, '1.234', '1.234000000')
        ]
    )
    def test_output_places(output_places, input_data, expected_output):
        """ Test DecimalValidator with output_places parameter (fixed number of decimal places in output value). """
        validator = DecimalValidator(output_places=output_places)
        decimal = validator.validate(input_data)
        assert type(decimal) is Decimal
        assert str(decimal) == expected_output

    # Invalid validator parameters

    @staticmethod
    def test_min_value_greater_than_max_value():
        """ Check that DecimalValidator raises exception when min_value is greater than max_value. """
        with pytest.raises(InvalidValidatorOptionException) as exception_info:
            DecimalValidator(min_value='4.0', max_value='3.9')
        assert str(exception_info.value) == 'Parameter "min_value" cannot be greater than "max_value".'

    @staticmethod
    def test_min_places_negative():
        """ Check that DecimalValidator raises exception when min_places is less than 0. """
        with pytest.raises(InvalidValidatorOptionException) as exception_info:
            DecimalValidator(min_places=-1)
        assert str(exception_info.value) == 'Parameter "min_places" cannot be negative.'

    @staticmethod
    def test_max_places_negative():
        """ Check that DecimalValidator raises exception when max_places is less than 0. """
        with pytest.raises(InvalidValidatorOptionException) as exception_info:
            DecimalValidator(max_places=-1)
        assert str(exception_info.value) == 'Parameter "max_places" cannot be negative.'

    @staticmethod
    def test_min_places_greater_than_max_places():
        """ Check that DecimalValidator raises exception when min_places is greater than max_places. """
        with pytest.raises(InvalidValidatorOptionException) as exception_info:
            DecimalValidator(min_places=3, max_places=2)
        assert str(exception_info.value) == 'Parameter "min_places" cannot be greater than "max_places".'

    @staticmethod
    def test_output_places_negative():
        """ Check that DecimalValidator raises exception when output_places is less than 0. """
        with pytest.raises(InvalidValidatorOptionException) as exception_info:
            DecimalValidator(output_places=-1)
        assert str(exception_info.value) == 'Parameter "output_places" cannot be negative.'
| <filename>tests/validators/decimal_validator_test.py
"""
validataclass
Copyright (c) 2021, binary butterfly GmbH and contributors
Use of this source code is governed by an MIT-style license that can be found in the LICENSE file.
"""
from decimal import Decimal
import pytest
from tests.test_utils import unpack_params
from validataclass.exceptions import RequiredValueError, InvalidTypeError, InvalidDecimalError, NumberRangeError, \
DecimalPlacesError, InvalidValidatorOptionException
from validataclass.validators import DecimalValidator
class DecimalValidatorTest:
@staticmethod
@pytest.mark.parametrize(
    'input_data, expected_decimal_str', [
        ('0', '0'),
        ('1.234', '1.234'),
        ('-0.001', '-0.001'),
        ('+42', '42'),
        ('-.1', '-0.1'),
        ('-1.', '-1'),
        ('-123456789.123456789', '-123456789.123456789'),
    ]
)
def test_valid_decimal(input_data, expected_decimal_str):
    """ Test DecimalValidator with valid input strings. """
    result = DecimalValidator().validate(input_data)
    assert type(result) is Decimal
    assert str(result) == expected_decimal_str
@staticmethod
def test_invalid_none():
    """ Check that DecimalValidator raises exceptions for None as value. """
    with pytest.raises(RequiredValueError) as exc_info:
        DecimalValidator().validate(None)
    assert exc_info.value.to_dict() == {'code': 'required_value'}
@staticmethod
@pytest.mark.parametrize('input_data', [1234, 1.234, True])
def test_invalid_wrong_type(input_data):
""" Check that DecimalValidator raises exceptions for values that are not of type 'str'. """
validator = DecimalValidator()
with pytest.raises(InvalidTypeError) as exception_info:
validator.validate(input_data)
assert exception_info.value.to_dict() == {
'code': 'invalid_type',
'expected_type': 'str',
}
@staticmethod
@pytest.mark.parametrize('input_data', ['', 'bananana', '1234x', '$123', '1,234', 'Infinity', 'NaN', '.', '1e3'])
def test_invalid_malformed_string(input_data):
""" Test DecimalValidator with malformed strings. """
validator = DecimalValidator()
with pytest.raises(InvalidDecimalError) as exception_info:
validator.validate(input_data)
assert exception_info.value.to_dict() == {'code': 'invalid_decimal'}
# Test value range requirement check
@staticmethod
@pytest.mark.parametrize(
'min_value, max_value, input_data', [
# min_value only (as Decimal object and as string)
*unpack_params(
Decimal('1'), None,
['1', '1.000', '1.00001', '+1.1', '42'],
),
*unpack_params(
'-3.000', None,
['-3', '-2.9999', '-2', '0.000', '1.234', '4.567'],
),
# max_value only
*unpack_params(
None, Decimal('-10.5'),
['-10.5', '-10.6', '-11', '-9999.999'],
),
*unpack_params(
None, '-0',
['-1.234', '-0.001', '0', '-0', '+0'],
),
# min_value and max_value
*unpack_params(
Decimal('0'), Decimal('10'),
['0.000', '0.001', '1', '9.9999', '+10.00000'],
),
*unpack_params(
'-1', '1',
['-1.0000', '-0.99999', '-0.0001', '0', '0.001', '0.9999', '1.000'],
),
*unpack_params(
Decimal('42'), '42.0',
['42', '+42', '42.0000000'],
),
]
)
def test_decimal_value_range_valid(min_value, max_value, input_data):
""" Test DecimalValidator with range requirements with valid decimal strings. """
validator = DecimalValidator(min_value=min_value, max_value=max_value)
decimal = validator.validate(input_data)
assert type(decimal) is Decimal
assert str(decimal) == input_data.lstrip('+').rstrip('.')
@staticmethod
@pytest.mark.parametrize(
'min_value, max_value, input_data', [
# min_value only (as Decimal object and as string)
*unpack_params(
Decimal('1'), None,
['0.9999', '0', '-1.00001', '-42'],
),
*unpack_params(
'-3.000', None,
['-3.000000001', '-3.1', '-42'],
),
# max_value only
*unpack_params(
None, Decimal('-10.5'),
['-10.499', '-10.000', '-10', '0', '0.001', '42'],
),
*unpack_params(
None, '-0',
['0.001', '1', '123.456'],
),
# min_value and max_value
*unpack_params(
Decimal('0'), Decimal('10'),
['-0.001', '-1', '10.0000001', '+42'],
),
*unpack_params(
'-1', '1',
['-1.0001', '-9.999', '1.000001', '1.234', '+42'],
),
*unpack_params(
Decimal('42'), '42.0',
['0', '41.999999', '42.00000001', '-42'],
),
]
)
def test_decimal_value_range_invalid(min_value, max_value, input_data):
""" Test DecimalValidator with range requirements with decimal strings outside the range. """
validator = DecimalValidator(min_value=min_value, max_value=max_value)
# Construct error dict with min_value and/or max_value, depending on which is specified
expected_error_dict = {'code': 'number_range_error'}
expected_error_dict.update({'min_value': str(min_value)} if min_value is not None else {})
expected_error_dict.update({'max_value': str(max_value)} if max_value is not None else {})
with pytest.raises(NumberRangeError) as exception_info:
validator.validate(input_data)
assert exception_info.value.to_dict() == expected_error_dict
# Test minimum/maximum decimal places requirements
@staticmethod
@pytest.mark.parametrize(
'min_places, max_places, input_data', [
# min_places only
*unpack_params(
0, None,
['0', '0.001', '0.10', '-123.456789', '42'],
),
*unpack_params(
1, None,
['0.0', '0.001', '0.10', '-123.456789', '42.0'],
),
*unpack_params(
3, None,
['0.000', '0.001', '0.100', '-123.456789', '42.000'],
),
# max_places only
*unpack_params(
None, 0,
['0', '-123', '42.'],
),
*unpack_params(
None, 1,
['0', '0.0', '0.1', '-123.4', '42'],
),
*unpack_params(
None, 3,
['0', '0.10', '0.01', '123.4', '-123.456', '42.'],
),
# min_places and max_places
*unpack_params(
0, 2,
['0', '0.0', '0.01', '0.1', '-123', '-123.45', '42.'],
),
*unpack_params(
2, 3,
['0.00', '0.000', '0.10', '0.001', '-123.45', '-123.456'],
),
*unpack_params(
2, 2,
['0.00', '0.01', '0.10', '-123.45', '42.00'],
),
]
)
def test_min_max_places_valid(min_places, max_places, input_data):
""" Test DecimalValidator with a minimum and/or maximum number of decimal places with valid decimal strings. """
validator = DecimalValidator(min_places=min_places, max_places=max_places)
decimal = validator.validate(input_data)
assert type(decimal) is Decimal
assert str(decimal) == input_data.rstrip('.')
@staticmethod
@pytest.mark.parametrize(
'min_places, max_places, input_data', [
# min_places only
*unpack_params(
1, None,
['0', '0.', '-123', '42.'],
),
*unpack_params(
3, None,
['0', '0.01', '-123.45'],
),
# max_places only
*unpack_params(
None, 0,
['0.0', '-123.4', '42.0', '0.1'],
),
*unpack_params(
None, 1,
['0.00', '0.01', '-123.45'],
),
*unpack_params(
None, 3,
['0.0000', '0.1000', '-123.4567'],
),
# min_places and max_places
*unpack_params(
2, 3,
['0.0', '0.0000', '0.1', '0.0001', '-123.4', '-123.4567', '42'],
),
*unpack_params(
2, 2,
['0.0', '0.000', '0.001', '0.1', '-123.4', '-123.456', '42'],
),
]
)
def test_min_max_places_invalid(min_places, max_places, input_data):
""" Test DecimalValidator with a minimum and/or maximum number of decimal places with invalid input. """
validator = DecimalValidator(min_places=min_places, max_places=max_places)
# Construct error dict with min_places and/or max_places, depending on which is specified
expected_error_dict = {'code': 'decimal_places'}
expected_error_dict.update({'min_places': min_places} if min_places is not None else {})
expected_error_dict.update({'max_places': max_places} if max_places is not None else {})
with pytest.raises(DecimalPlacesError) as exception_info:
validator.validate(input_data)
assert exception_info.value.to_dict() == expected_error_dict
# Test output_places parameter
@staticmethod
@pytest.mark.parametrize(
'output_places, input_data, expected_output', [
# output_places=0
(0, '0', '0'),
(0, '-42', '-42'),
(0, '42.0', '42'),
(0, '123.456', '123'), # rounded down
(0, '123.567', '124'), # rounded up
# output_places=1
(1, '0', '0.0'),
(1, '-42', '-42.0'),
(1, '42.000', '42.0'),
(1, '123.456', '123.5'),
(1, '999.999', '1000.0'),
# output_places=3
(3, '0', '0.000'),
(3, '-42', '-42.000'),
(3, '42.000000', '42.000'),
(3, '123.456', '123.456'),
(3, '123.456789', '123.457'),
# output_places=9
(9, '1.234', '1.234000000')
]
)
def test_output_places(output_places, input_data, expected_output):
""" Test DecimalValidator with output_places parameter (fixed number of decimal places in output value). """
validator = DecimalValidator(output_places=output_places)
decimal = validator.validate(input_data)
assert type(decimal) is Decimal
assert str(decimal) == expected_output
# Invalid validator parameters
@staticmethod
def test_min_value_greater_than_max_value():
""" Check that DecimalValidator raises exception when min_value is greater than max_value. """
with pytest.raises(InvalidValidatorOptionException) as exception_info:
DecimalValidator(min_value='4.0', max_value='3.9')
assert str(exception_info.value) == 'Parameter "min_value" cannot be greater than "max_value".'
@staticmethod
def test_min_places_negative():
""" Check that DecimalValidator raises exception when min_places is less than 0. """
with pytest.raises(InvalidValidatorOptionException) as exception_info:
DecimalValidator(min_places=-1)
assert str(exception_info.value) == 'Parameter "min_places" cannot be negative.'
@staticmethod
def test_max_places_negative():
""" Check that DecimalValidator raises exception when max_places is less than 0. """
with pytest.raises(InvalidValidatorOptionException) as exception_info:
DecimalValidator(max_places=-1)
assert str(exception_info.value) == 'Parameter "max_places" cannot be negative.'
@staticmethod
def test_min_places_greater_than_max_places():
""" Check that DecimalValidator raises exception when min_places is greater than max_places. """
with pytest.raises(InvalidValidatorOptionException) as exception_info:
DecimalValidator(min_places=3, max_places=2)
assert str(exception_info.value) == 'Parameter "min_places" cannot be greater than "max_places".'
@staticmethod
def test_output_places_negative():
""" Check that DecimalValidator raises exception when output_places is less than 0. """
with pytest.raises(InvalidValidatorOptionException) as exception_info:
DecimalValidator(output_places=-1)
assert str(exception_info.value) == 'Parameter "output_places" cannot be negative.'
| en | 0.690448 | validataclass Copyright (c) 2021, binary butterfly GmbH and contributors Use of this source code is governed by an MIT-style license that can be found in the LICENSE file. Test DecimalValidator with valid input strings. Check that DecimalValidator raises exceptions for None as value. Check that DecimalValidator raises exceptions for values that are not of type 'str'. Test DecimalValidator with malformed strings. # Test value range requirement check # min_value only (as Decimal object and as string) # max_value only # min_value and max_value Test DecimalValidator with range requirements with valid decimal strings. # min_value only (as Decimal object and as string) # max_value only # min_value and max_value Test DecimalValidator with range requirements with decimal strings outside the range. # Construct error dict with min_value and/or max_value, depending on which is specified # Test minimum/maximum decimal places requirements # min_places only # max_places only # min_places and max_places Test DecimalValidator with a minimum and/or maximum number of decimal places with valid decimal strings. # min_places only # max_places only # min_places and max_places Test DecimalValidator with a minimum and/or maximum number of decimal places with invalid input. # Construct error dict with min_places and/or max_places, depending on which is specified # Test output_places parameter # output_places=0 # rounded down # rounded up # output_places=1 # output_places=3 # output_places=9 Test DecimalValidator with output_places parameter (fixed number of decimal places in output value). # Invalid validator parameters Check that DecimalValidator raises exception when min_value is greater than max_value. Check that DecimalValidator raises exception when min_places is less than 0. Check that DecimalValidator raises exception when max_places is less than 0. Check that DecimalValidator raises exception when min_places is greater than max_places. 
Check that DecimalValidator raises exception when output_places is less than 0. | 2.536229 | 3 |
autokernel/kconfig.py | oddlama/autokernel | 49 | 6619062 | from . import log
import subprocess
import os
import re
import kconfiglib
import sympy
from sympy.logic import simplify_logic
from sympy.logic.inference import satisfiable
def symbol_can_be_user_assigned(sym):
    """Return True if any of the symbol's menu nodes has a prompt, i.e. the
    symbol is visible to (and assignable by) the user."""
    return any(node.prompt for node in sym.nodes)
# Map tristate value -> ANSI color fragment (red / yellow / green).
# NOTE(review): these strings look like they should begin with an ESC
# character ("\033[1;31m" etc.) that may have been lost in transcription --
# confirm against what log.color() expects.
value_to_str_color = {
    'n': "[1;31m",
    'm': "[1;33m",
    'y': "[1;32m",
}
def value_to_str(value):
    """Render a symbol value for display; tristate values get a colored [x] tag,
    anything else is shown quoted."""
    if value not in kconfiglib.STR_TO_TRI:
        return "'{}'".format(value)
    return '[{}{}{}]'.format(log.color(value_to_str_color[value]), value, log.color_reset)
def tri_to_bool(tri):
    """Convert a tristate value to bool: 'n' maps to False, 'm' and 'y' to True."""
    n_value = kconfiglib.STR_TO_TRI['n']
    return tri != n_value
def expr_value_bool(expr):
    """Evaluate *expr* with kconfiglib.expr_value() and collapse the tristate
    result to a bool via tri_to_bool()."""
    tristate = kconfiglib.expr_value(expr)
    return tri_to_bool(tristate)
def set_env_default(var, default_value):
    """Set environment variable *var* to *default_value* unless it is already set."""
    os.environ.setdefault(var, default_value)
def detect_uname_arch():
    """Return the machine hardware name reported by `uname -m`."""
    completed = subprocess.run(['uname', '-m'], check=True, stdout=subprocess.PIPE)
    return completed.stdout.decode().strip().splitlines()[0]
def detect_arch():
    """Map the `uname -m` machine name to the kernel's ARCH naming scheme."""
    # Order matters: e.g. 'x86_64' must be collapsed after 'i.86'.
    substitutions = (
        ('i.86', 'x86'),
        ('x86_64', 'x86'),
        ('sun4u', 'sparc64'),
        ('arm.*', 'arm'),
        ('sa110', 'arm'),
        ('s390x', 's390'),
        ('parisc64', 'parisc'),
        ('ppc.*', 'powerpc'),
        ('mips.*', 'mips'),
        ('sh[234].*', 'sh'),
        ('aarch64.*', 'arm64'),
        ('riscv.*', 'riscv'),
    )
    arch = get_uname_arch()
    for pattern, replacement in substitutions:
        arch = re.sub(pattern, replacement, arch)
    return arch
def initialize_environment():
    """
    Initializes important environment variables used by Kconfig evaluation
    (CC, LD, HOSTCC, HOSTCXX, CC_VERSION_TEXT), if not set by the user.
    """
    set_env_default("CC", "gcc")
    # BUG FIX: the linker binary is `ld`; `ldd` only prints shared-library
    # dependencies and is not a linker.
    set_env_default("LD", "ld")
    set_env_default("HOSTCC", "gcc")
    set_env_default("HOSTCXX", "g++")
    if "CC_VERSION_TEXT" not in os.environ:
        # First line of `$CC --version`, as the kernel's own scripts use it.
        os.environ["CC_VERSION_TEXT"] = subprocess.run([os.environ['CC'], '--version'], check=True, stdout=subprocess.PIPE).stdout.decode().strip().splitlines()[0]
# Lazily cached result of detect_arch().
_arch = None
def get_arch():
    """
    Returns arch of the current host as the kernel would interpret it.
    The detection result is cached after the first call.
    """
    global _arch  # pylint: disable=global-statement
    if not _arch:
        _arch = detect_arch()
    return _arch
# Lazily cached result of detect_uname_arch().
_uname_arch = None
def get_uname_arch():
    """
    Returns the raw `uname -m` arch of the current host.
    The detection result is cached after the first call.
    """
    global _uname_arch  # pylint: disable=global-statement
    if not _uname_arch:
        _uname_arch = detect_uname_arch()
    return _uname_arch
# Cache of kernel versions keyed by canonical kernel directory path.
_kernel_version = {}
def get_kernel_version(kernel_dir):
    """
    Returns the kernel version for the given kernel_dir, as reported by
    `make kernelversion` (cached per canonical directory path).
    """
    canon = os.path.realpath(kernel_dir)
    if canon not in _kernel_version:
        output = subprocess.run(['make', 'kernelversion'], cwd=canon, check=True, stdout=subprocess.PIPE).stdout
        _kernel_version[canon] = output.decode().strip().splitlines()[0]
    return _kernel_version[canon]
def load_kconfig(kernel_dir):
    """
    Parse the top-level Kconfig file in *kernel_dir* and return the resulting
    kconfiglib.Kconfig object. Sets the environment variables the kernel's
    Kconfig files expect (srctree, ARCH, SRCARCH, KERNELVERSION).
    """
    kconfig_file = os.path.join(kernel_dir, "Kconfig")
    if not os.path.isfile(kconfig_file):
        raise ValueError("'{}' must point to a valid Kconfig file!".format(kconfig_file))
    kver = get_kernel_version(kernel_dir)
    log.info("Loading '{}' (version {})".format(kconfig_file, kver))
    # Kconfig sources other files relative to $srctree.
    os.environ['srctree'] = kernel_dir
    os.environ["ARCH"] = os.environ["SRCARCH"] = get_arch()
    os.environ["KERNELVERSION"] = kver
    kconfig = kconfiglib.Kconfig(os.path.realpath(kconfig_file), warn_to_stderr=False)
    # Surface parser warnings through our own logging instead of stderr.
    for w in kconfig.warnings:
        for line in w.split('\n'):
            log.verbose(line)
    return kconfig
def allnoconfig(kconfig):
    """
    Resets the current configuration to the equivalent of calling
    `make allnoconfig` in the kernel source tree.
    """
    log.info("Applying allnoconfig")
    # Allnoconfig from kconfiglib/allnoconfig.py
    # Temporarily silence warnings while bulk-assigning every symbol.
    warn_save = kconfig.warn
    kconfig.warn = False
    for sym in kconfig.unique_defined_syms:
        # Symbols flagged allnoconfig_y must stay 'y' even in an allnoconfig.
        sym.set_value('y' if sym.is_allnoconfig_y else 'n')
    kconfig.warn = warn_save
    # Apply overrides from an optional allno.config file, like the kernel does.
    kconfig.load_allconfig("allno.config")
class ExprSymbol:
    """Leaf wrapper around a kconfig symbol used in a dependency expression."""
    def __init__(self, sym):
        self.sym = sym
    def is_satisfied(self):
        # Satisfied when the symbol currently evaluates to 'm' or 'y'.
        return tri_to_bool(self.sym.tri_value)
class ExprCompare:
    """
    Leaf wrapper for a comparison between two kconfig symbols
    (e.g. FOO < BAR) inside a dependency expression.
    """
    def __init__(self, cmp_type, lhs, rhs):
        # cmp_type is one of kconfiglib's relation constants (EQUAL, LESS, ...).
        self.cmp_type = cmp_type
        self.lhs = lhs
        self.rhs = rhs
    def is_satisfied(self):
        """Evaluate the comparison and return the boolean result."""
        if self.cmp_type == kconfiglib.EQUAL:
            return self.lhs == self.rhs
        elif self.cmp_type == kconfiglib.UNEQUAL:
            return self.lhs != self.rhs
        elif self.cmp_type == kconfiglib.LESS:
            return self.lhs < self.rhs
        elif self.cmp_type == kconfiglib.LESS_EQUAL:
            return self.lhs <= self.rhs
        elif self.cmp_type == kconfiglib.GREATER:
            return self.lhs > self.rhs
        elif self.cmp_type == kconfiglib.GREATER_EQUAL:
            return self.lhs >= self.rhs
        # BUG FIX: previously fell through and silently returned None for an
        # unknown comparison type; fail loudly instead.
        raise ValueError("Unknown comparison type: '{}'".format(self.cmp_type))
    def __str__(self):
        return "{} {} {}".format(self.lhs.name, kconfiglib.REL_TO_STR[self.cmp_type], self.rhs.name)
class ExprIgnore:
    """Leaf wrapper for expression parts that cannot be satisfied automatically."""
    def is_satisfied(self):
        # Ignored parts are never considered satisfied.
        return False
class Expr:
    """
    Translates a symbol's direct dependency expression into a sympy boolean
    expression over user-assignable symbols, so that satisfiability and
    required assignments can be computed.
    """
    def __init__(self, sym):
        self.sym = sym
        # List of (wrapper, sympy_symbol) pairs; sympy symbols are named
        # after their index in this list.
        self.symbols = []
        self.expr_ignore_sym = None
        self.expr = self._parse(sym.direct_dep)
    def _add_symbol_if_nontrivial(self, sym, trivialize=True):
        """Map a wrapper (ExprSymbol/ExprCompare/ExprIgnore) to a sympy symbol."""
        # Symbols without a prompt cannot be assigned by the user; fold them
        # to their current constant value instead of creating a variable.
        if sym.__class__ is ExprSymbol and not symbol_can_be_user_assigned(sym.sym):
            return sympy.true if kconfiglib.expr_value(sym.sym) else sympy.false
        # If the symbol is aleady satisfied in the current config,
        # skip it.
        if trivialize and sym.is_satisfied():
            return sympy.true
        # Return existing symbol if possible
        for s, sympy_s in self.symbols:
            if s.__class__ is sym.__class__ is ExprSymbol:
                if s.sym == sym.sym:
                    return sympy_s
        # Create new symbol
        i = len(self.symbols)
        s = sympy.Symbol(str(i))
        self.symbols.append((sym, s))
        return s
    def _parse(self, expr, trivialize=True):
        """Recursively convert a kconfiglib expression tree to a sympy expression."""
        def add_sym(expr, trivialize=trivialize):
            return self._add_symbol_if_nontrivial(ExprSymbol(expr), trivialize)
        if expr.__class__ is not tuple:
            if expr.__class__ is kconfiglib.Symbol:
                if expr.is_constant:
                    return sympy.true if tri_to_bool(expr) else sympy.false
                elif expr.type in [kconfiglib.BOOL, kconfiglib.TRISTATE]:
                    return add_sym(expr)
                else:
                    # Ignore unknown symbol types
                    return self.expr_ignore()
            elif expr.__class__ is kconfiglib.Choice:
                return self.expr_ignore()
            else:
                raise ValueError("Unexpected expression type '{}'".format(expr.__class__.__name__))
        else:
            # If the expression is an operator, resolve the operator.
            if expr[0] is kconfiglib.AND:
                return sympy.And(self._parse(expr[1]), self._parse(expr[2]))
            elif expr[0] is kconfiglib.OR:
                return sympy.Or(self._parse(expr[1]), self._parse(expr[2]))
            elif expr[0] is kconfiglib.NOT:
                return sympy.Not(self._parse(expr[1], trivialize=False))
            elif expr[0] is kconfiglib.EQUAL and expr[2].is_constant:
                if tri_to_bool(expr[2]):
                    return add_sym(expr[1], trivialize=False)
                else:
                    # BUG FIX: the raw ExprSymbol wrapper was previously passed
                    # to sympy.Not() directly; it must first be registered as a
                    # sympy symbol via add_sym(), like in the branch above.
                    return sympy.Not(add_sym(expr[1], trivialize=False))
            elif expr[0] in [kconfiglib.UNEQUAL, kconfiglib.LESS, kconfiglib.LESS_EQUAL, kconfiglib.GREATER, kconfiglib.GREATER_EQUAL]:
                if expr[1].__class__ is tuple or expr[2].__class__ is tuple:
                    raise ValueError("Cannot compare expressions")
                return self._add_symbol_if_nontrivial(ExprCompare(expr[0], expr[1], expr[2]), trivialize)
            else:
                raise ValueError("Unknown expression type: '{}'".format(expr[0]))
    def expr_ignore(self):
        """Return the (lazily created) sympy symbol standing for ignored parts."""
        if not self.expr_ignore_sym:
            self.expr_ignore_sym = self._add_symbol_if_nontrivial(ExprIgnore())
        return self.expr_ignore_sym
    def simplify(self):
        """Simplify the sympy expression in place."""
        self.expr = simplify_logic(self.expr)
    def unsatisfied_deps(self):
        """
        Return a list of (index, wrapper, required_value) tuples describing a
        satisfying assignment, [] if the expression is trivially true, or
        False if it is unsatisfiable.
        """
        configuration = satisfiable(self.expr)
        if not configuration:
            return False
        # If configuration is 'True', return none.
        if configuration.get(True, False):
            return []
        deps = []
        for k in configuration:
            idx = int(k.name)
            deps.append((idx, self.symbols[idx][0], configuration[k]))
        deps.sort(key=lambda x: x[0], reverse=True)
        return deps
def required_deps(sym):
    """
    Determine the symbol assignments required to satisfy the direct
    dependencies of *sym*. Returns a list of (symbol, value) tuples,
    or False if the dependencies cannot be satisfied.
    """
    expr = Expr(sym)
    expr.simplify()
    unsat = expr.unsatisfied_deps()
    if unsat is False:
        return False
    deps = []
    for _, leaf, value in unsat:
        if leaf.__class__ is ExprSymbol:
            deps.append((leaf.sym, value))
        elif leaf.__class__ is not ExprIgnore:
            # Comparisons cannot be solved by a simple assignment.
            raise ValueError("Cannot automatically satisfy inequality: '{}'".format(leaf))
    return deps
| from . import log
import subprocess
import os
import re
import kconfiglib
import sympy
from sympy.logic import simplify_logic
from sympy.logic.inference import satisfiable
def symbol_can_be_user_assigned(sym):
    """Return True if any of the symbol's menu nodes has a prompt, i.e. the
    symbol is visible to (and assignable by) the user."""
    return any(node.prompt for node in sym.nodes)
# Map tristate value -> ANSI color fragment (red / yellow / green).
# NOTE(review): these strings look like they should begin with an ESC
# character ("\033[1;31m" etc.) that may have been lost in transcription --
# confirm against what log.color() expects.
value_to_str_color = {
    'n': "[1;31m",
    'm': "[1;33m",
    'y': "[1;32m",
}
def value_to_str(value):
    """Render a symbol value for display; tristate values get a colored [x] tag,
    anything else is shown quoted."""
    if value not in kconfiglib.STR_TO_TRI:
        return "'{}'".format(value)
    return '[{}{}{}]'.format(log.color(value_to_str_color[value]), value, log.color_reset)
def tri_to_bool(tri):
    """Convert a tristate value to bool: 'n' maps to False, 'm' and 'y' to True."""
    n_value = kconfiglib.STR_TO_TRI['n']
    return tri != n_value
def expr_value_bool(expr):
    """Evaluate *expr* with kconfiglib.expr_value() and collapse the tristate
    result to a bool via tri_to_bool()."""
    tristate = kconfiglib.expr_value(expr)
    return tri_to_bool(tristate)
def set_env_default(var, default_value):
    """Set environment variable *var* to *default_value* unless it is already set."""
    os.environ.setdefault(var, default_value)
def detect_uname_arch():
    """Return the machine hardware name reported by `uname -m`."""
    completed = subprocess.run(['uname', '-m'], check=True, stdout=subprocess.PIPE)
    return completed.stdout.decode().strip().splitlines()[0]
def detect_arch():
    """Map the `uname -m` machine name to the kernel's ARCH naming scheme."""
    # Order matters: e.g. 'x86_64' must be collapsed after 'i.86'.
    substitutions = (
        ('i.86', 'x86'),
        ('x86_64', 'x86'),
        ('sun4u', 'sparc64'),
        ('arm.*', 'arm'),
        ('sa110', 'arm'),
        ('s390x', 's390'),
        ('parisc64', 'parisc'),
        ('ppc.*', 'powerpc'),
        ('mips.*', 'mips'),
        ('sh[234].*', 'sh'),
        ('aarch64.*', 'arm64'),
        ('riscv.*', 'riscv'),
    )
    arch = get_uname_arch()
    for pattern, replacement in substitutions:
        arch = re.sub(pattern, replacement, arch)
    return arch
def initialize_environment():
    """
    Initializes important environment variables used by Kconfig evaluation
    (CC, LD, HOSTCC, HOSTCXX, CC_VERSION_TEXT), if not set by the user.
    """
    set_env_default("CC", "gcc")
    # BUG FIX: the linker binary is `ld`; `ldd` only prints shared-library
    # dependencies and is not a linker.
    set_env_default("LD", "ld")
    set_env_default("HOSTCC", "gcc")
    set_env_default("HOSTCXX", "g++")
    if "CC_VERSION_TEXT" not in os.environ:
        # First line of `$CC --version`, as the kernel's own scripts use it.
        os.environ["CC_VERSION_TEXT"] = subprocess.run([os.environ['CC'], '--version'], check=True, stdout=subprocess.PIPE).stdout.decode().strip().splitlines()[0]
# Lazily cached result of detect_arch().
_arch = None
def get_arch():
    """
    Returns arch of the current host as the kernel would interpret it.
    The detection result is cached after the first call.
    """
    global _arch  # pylint: disable=global-statement
    if not _arch:
        _arch = detect_arch()
    return _arch
# Lazily cached result of detect_uname_arch().
_uname_arch = None
def get_uname_arch():
    """
    Returns the raw `uname -m` arch of the current host.
    The detection result is cached after the first call.
    """
    global _uname_arch  # pylint: disable=global-statement
    if not _uname_arch:
        _uname_arch = detect_uname_arch()
    return _uname_arch
# Cache of kernel versions keyed by canonical kernel directory path.
_kernel_version = {}
def get_kernel_version(kernel_dir):
    """
    Returns the kernel version for the given kernel_dir, as reported by
    `make kernelversion` (cached per canonical directory path).
    """
    canon = os.path.realpath(kernel_dir)
    if canon not in _kernel_version:
        output = subprocess.run(['make', 'kernelversion'], cwd=canon, check=True, stdout=subprocess.PIPE).stdout
        _kernel_version[canon] = output.decode().strip().splitlines()[0]
    return _kernel_version[canon]
def load_kconfig(kernel_dir):
    """
    Parse the top-level Kconfig file in *kernel_dir* and return the resulting
    kconfiglib.Kconfig object. Sets the environment variables the kernel's
    Kconfig files expect (srctree, ARCH, SRCARCH, KERNELVERSION).
    """
    kconfig_file = os.path.join(kernel_dir, "Kconfig")
    if not os.path.isfile(kconfig_file):
        raise ValueError("'{}' must point to a valid Kconfig file!".format(kconfig_file))
    kver = get_kernel_version(kernel_dir)
    log.info("Loading '{}' (version {})".format(kconfig_file, kver))
    # Kconfig sources other files relative to $srctree.
    os.environ['srctree'] = kernel_dir
    os.environ["ARCH"] = os.environ["SRCARCH"] = get_arch()
    os.environ["KERNELVERSION"] = kver
    kconfig = kconfiglib.Kconfig(os.path.realpath(kconfig_file), warn_to_stderr=False)
    # Surface parser warnings through our own logging instead of stderr.
    for w in kconfig.warnings:
        for line in w.split('\n'):
            log.verbose(line)
    return kconfig
def allnoconfig(kconfig):
    """
    Resets the current configuration to the equivalent of calling
    `make allnoconfig` in the kernel source tree.
    """
    log.info("Applying allnoconfig")
    # Allnoconfig from kconfiglib/allnoconfig.py
    # Temporarily silence warnings while bulk-assigning every symbol.
    warn_save = kconfig.warn
    kconfig.warn = False
    for sym in kconfig.unique_defined_syms:
        # Symbols flagged allnoconfig_y must stay 'y' even in an allnoconfig.
        sym.set_value('y' if sym.is_allnoconfig_y else 'n')
    kconfig.warn = warn_save
    # Apply overrides from an optional allno.config file, like the kernel does.
    kconfig.load_allconfig("allno.config")
class ExprSymbol:
    """Leaf wrapper around a kconfig symbol used in a dependency expression."""
    def __init__(self, sym):
        self.sym = sym
    def is_satisfied(self):
        # Satisfied when the symbol currently evaluates to 'm' or 'y'.
        return tri_to_bool(self.sym.tri_value)
class ExprCompare:
    """
    Leaf wrapper for a comparison between two kconfig symbols
    (e.g. FOO < BAR) inside a dependency expression.
    """
    def __init__(self, cmp_type, lhs, rhs):
        # cmp_type is one of kconfiglib's relation constants (EQUAL, LESS, ...).
        self.cmp_type = cmp_type
        self.lhs = lhs
        self.rhs = rhs
    def is_satisfied(self):
        """Evaluate the comparison and return the boolean result."""
        if self.cmp_type == kconfiglib.EQUAL:
            return self.lhs == self.rhs
        elif self.cmp_type == kconfiglib.UNEQUAL:
            return self.lhs != self.rhs
        elif self.cmp_type == kconfiglib.LESS:
            return self.lhs < self.rhs
        elif self.cmp_type == kconfiglib.LESS_EQUAL:
            return self.lhs <= self.rhs
        elif self.cmp_type == kconfiglib.GREATER:
            return self.lhs > self.rhs
        elif self.cmp_type == kconfiglib.GREATER_EQUAL:
            return self.lhs >= self.rhs
        # BUG FIX: previously fell through and silently returned None for an
        # unknown comparison type; fail loudly instead.
        raise ValueError("Unknown comparison type: '{}'".format(self.cmp_type))
    def __str__(self):
        return "{} {} {}".format(self.lhs.name, kconfiglib.REL_TO_STR[self.cmp_type], self.rhs.name)
class ExprIgnore:
    """Leaf wrapper for expression parts that cannot be satisfied automatically."""
    def is_satisfied(self):
        # Ignored parts are never considered satisfied.
        return False
class Expr:
    """
    Translates a symbol's direct dependency expression into a sympy boolean
    expression over user-assignable symbols, so that satisfiability and
    required assignments can be computed.
    """
    def __init__(self, sym):
        self.sym = sym
        # List of (wrapper, sympy_symbol) pairs; sympy symbols are named
        # after their index in this list.
        self.symbols = []
        self.expr_ignore_sym = None
        self.expr = self._parse(sym.direct_dep)
    def _add_symbol_if_nontrivial(self, sym, trivialize=True):
        """Map a wrapper (ExprSymbol/ExprCompare/ExprIgnore) to a sympy symbol."""
        # Symbols without a prompt cannot be assigned by the user; fold them
        # to their current constant value instead of creating a variable.
        if sym.__class__ is ExprSymbol and not symbol_can_be_user_assigned(sym.sym):
            return sympy.true if kconfiglib.expr_value(sym.sym) else sympy.false
        # If the symbol is aleady satisfied in the current config,
        # skip it.
        if trivialize and sym.is_satisfied():
            return sympy.true
        # Return existing symbol if possible
        for s, sympy_s in self.symbols:
            if s.__class__ is sym.__class__ is ExprSymbol:
                if s.sym == sym.sym:
                    return sympy_s
        # Create new symbol
        i = len(self.symbols)
        s = sympy.Symbol(str(i))
        self.symbols.append((sym, s))
        return s
    def _parse(self, expr, trivialize=True):
        """Recursively convert a kconfiglib expression tree to a sympy expression."""
        def add_sym(expr, trivialize=trivialize):
            return self._add_symbol_if_nontrivial(ExprSymbol(expr), trivialize)
        if expr.__class__ is not tuple:
            if expr.__class__ is kconfiglib.Symbol:
                if expr.is_constant:
                    return sympy.true if tri_to_bool(expr) else sympy.false
                elif expr.type in [kconfiglib.BOOL, kconfiglib.TRISTATE]:
                    return add_sym(expr)
                else:
                    # Ignore unknown symbol types
                    return self.expr_ignore()
            elif expr.__class__ is kconfiglib.Choice:
                return self.expr_ignore()
            else:
                raise ValueError("Unexpected expression type '{}'".format(expr.__class__.__name__))
        else:
            # If the expression is an operator, resolve the operator.
            if expr[0] is kconfiglib.AND:
                return sympy.And(self._parse(expr[1]), self._parse(expr[2]))
            elif expr[0] is kconfiglib.OR:
                return sympy.Or(self._parse(expr[1]), self._parse(expr[2]))
            elif expr[0] is kconfiglib.NOT:
                return sympy.Not(self._parse(expr[1], trivialize=False))
            elif expr[0] is kconfiglib.EQUAL and expr[2].is_constant:
                if tri_to_bool(expr[2]):
                    return add_sym(expr[1], trivialize=False)
                else:
                    # BUG FIX: the raw ExprSymbol wrapper was previously passed
                    # to sympy.Not() directly; it must first be registered as a
                    # sympy symbol via add_sym(), like in the branch above.
                    return sympy.Not(add_sym(expr[1], trivialize=False))
            elif expr[0] in [kconfiglib.UNEQUAL, kconfiglib.LESS, kconfiglib.LESS_EQUAL, kconfiglib.GREATER, kconfiglib.GREATER_EQUAL]:
                if expr[1].__class__ is tuple or expr[2].__class__ is tuple:
                    raise ValueError("Cannot compare expressions")
                return self._add_symbol_if_nontrivial(ExprCompare(expr[0], expr[1], expr[2]), trivialize)
            else:
                raise ValueError("Unknown expression type: '{}'".format(expr[0]))
    def expr_ignore(self):
        """Return the (lazily created) sympy symbol standing for ignored parts."""
        if not self.expr_ignore_sym:
            self.expr_ignore_sym = self._add_symbol_if_nontrivial(ExprIgnore())
        return self.expr_ignore_sym
    def simplify(self):
        """Simplify the sympy expression in place."""
        self.expr = simplify_logic(self.expr)
    def unsatisfied_deps(self):
        """
        Return a list of (index, wrapper, required_value) tuples describing a
        satisfying assignment, [] if the expression is trivially true, or
        False if it is unsatisfiable.
        """
        configuration = satisfiable(self.expr)
        if not configuration:
            return False
        # If configuration is 'True', return none.
        if configuration.get(True, False):
            return []
        deps = []
        for k in configuration:
            idx = int(k.name)
            deps.append((idx, self.symbols[idx][0], configuration[k]))
        deps.sort(key=lambda x: x[0], reverse=True)
        return deps
def required_deps(sym):
    """
    Determine the symbol assignments required to satisfy the direct
    dependencies of *sym*. Returns a list of (symbol, value) tuples,
    or False if the dependencies cannot be satisfied.
    """
    expr = Expr(sym)
    expr.simplify()
    unsat = expr.unsatisfied_deps()
    if unsat is False:
        return False
    deps = []
    for _, leaf, value in unsat:
        if leaf.__class__ is ExprSymbol:
            deps.append((leaf.sym, value))
        elif leaf.__class__ is not ExprIgnore:
            # Comparisons cannot be solved by a simple assignment.
            raise ValueError("Cannot automatically satisfy inequality: '{}'".format(leaf))
    return deps
| en | 0.708735 | Converts a tristate to a boolean value (['n'] → False, ['m', 'y'] → True) Evaluates the given expression using kconfiglib.expr_value(expr) and converts the result to a boolean value using tri_to_bool(). Sets an environment variable to the given default_value if it is currently unset. Initializes important environment variables, if not set by the user. like Returns arch of the current host as the kernel would interpret it # pylint: disable=global-statement Returns arch of the current host as the kernel would interpret it # pylint: disable=global-statement Returns the kernel version for the given kernel_dir. Resets the current configuration to the equivalent of calling `make allnoconfig` in the kernel source tree. # Allnoconfig from kconfiglib/allnoconfig.py # If the symbol is aleady satisfied in the current config, # skip it. # Return existing symbol if possible # Create new symbol # Ignore unknown symbol types # If the expression is an operator, resolve the operator. # If configuration is 'True', return none. | 2.342531 | 2 |
snakes/grid.py | jan-g/hacking-the-cryptic | 0 | 6619063 | from collections import namedtuple
from z3 import Solver, Int, sat, Not, And, Bool
# Marker types returned by Grid.__call__ to describe a cell's solved content.
Empty = namedtuple("Empty", [])
HeadMark = namedtuple("HeadMark", ["distance"])  # 'H' clue cell with its solved value
TailMark = namedtuple("TailMark", ["distance"])  # 'T' clue cell with its solved value
Snake = namedtuple("Snake", [])
Head = namedtuple("Head", [])
Tail = namedtuple("Tail", [])
Unknown = namedtuple("Unknown", [])  # truth value not determined by the current model
class Grid:
    """
    Snake puzzle grid backed by a z3 solver.

    The textual grid uses '.' for cells that may contain part of the snake,
    'H' for cells carrying a head-distance clue and 'T' for cells carrying a
    tail-distance clue. Add constraints with add(), call solve() to find a
    model, then index the grid with grid(x, y) to read back solved cells.
    """
    def __init__(self, lines):
        self.grid = {}
        for y, line in enumerate(lines):
            for x, char in enumerate(line):
                self.grid[x, y] = char
        self.height = len(lines)
        self.width = len(lines[0])
        self.solver = Solver()
        self.model = None  # set by solve() once a satisfying model exists
        # Set up variables
        self.vars = {}
        # Head location
        self["hx"] = Int("hx")
        self["hy"] = Int("hy")
        # Tail location
        self["tx"] = Int("tx")
        self["ty"] = Int("ty")
        # Cells with a head or tail marker
        for y in range(self.height):
            for x in range(self.width):
                if self.grid[x, y] == "H":
                    v = "head_{}_{}".format(x, y)
                    self[v] = Int(v)
                if self.grid[x, y] == "T":
                    v = "tail_{}_{}".format(x, y)
                    self[v] = Int(v)
        # Cells with a potential snake
        for y in range(self.height):
            for x in range(self.width):
                if self.grid[x, y] == ".":
                    v = "snake_{}_{}".format(x, y)
                    self[v] = Bool(v)
    def __setitem__(self, item, var):
        """Register a solver variable under a unique name."""
        if item in self.vars:
            raise Exception("can't insert a variable twice")
        self.vars[item] = var
    def __getitem__(self, item):
        """Look up a previously registered solver variable."""
        return self.vars[item]
    def __call__(self, x, y):
        """Return the solved content of cell (x, y) as one of the marker types."""
        c = self.grid[x, y]
        if c == ".":
            hx, hy = self.head()
            if x == self.eval(hx) and y == self.eval(hy):
                return Head()
            tx, ty = self.tail()
            if x == self.eval(tx) and y == self.eval(ty):
                return Tail()
            s = self.snake(x, y)
            try:
                if self.eval(s):
                    return Snake()
                return Empty()
            except Exception:
                # BUG FIX: narrowed from a bare `except:` (which also swallowed
                # KeyboardInterrupt/SystemExit). The model may leave this
                # variable unassigned, in which case truth-testing it raises.
                return Unknown()
        elif c == "H":
            return HeadMark(self.eval(self["head_{}_{}".format(x, y)]))
        elif c == "T":
            return TailMark(self.eval(self["tail_{}_{}".format(x, y)]))
        else:
            raise ValueError("unknown value at {}, {}".format(x, y))
    def solve(self):
        """Find a first or next solution; returns True if a model was found."""
        if self.model is not None:
            # We have a potential solution. Collect up the values assigned to
            # each variable in it. This gives us, eg, [hx == 0, hy == 0]
            assignments = [var == self.eval(var) for (_, var) in self.vars.items()]
            # Now, add the negation of the conjunction of these. In other words,
            # explicitly rule out this particular solution
            self.add(Not(And(*assignments)))
        if self.solver.check() != sat:
            return False
        self.model = self.solver.model()
        return True
    def eval(self, var):
        """Evaluate a variable/expression under the current model."""
        return self.model.eval(var)
    def add(self, constraint):
        """Add a constraint to be satisfied"""
        self.solver.add(constraint)
    def head(self):
        """Return the (x, y) variables of the snake's head."""
        return self["hx"], self["hy"]
    def tail(self):
        """Return the (x, y) variables of the snake's tail."""
        return self["tx"], self["ty"]
    def snake_possible(self, x, y):
        """True if cell (x, y) may contain part of the snake ('.' in the grid)."""
        return self.grid[x, y] == "."
    def head_mark(self, x, y):
        """Return the head-clue variable for (x, y), or None if not an 'H' cell."""
        if self.grid[x, y] == "H":
            return self["head_{}_{}".format(x, y)]
        return None
    def tail_mark(self, x, y):
        """Return the tail-clue variable for (x, y), or None if not a 'T' cell."""
        if self.grid[x, y] == "T":
            return self["tail_{}_{}".format(x, y)]
        return None
    def snake(self, x, y):
        """Return the snake variable for (x, y), or None for non-'.' or off-grid cells."""
        if self.grid.get((x, y)) == ".":
            return self["snake_{}_{}".format(x, y)]
        return None
    def orthogonal_neighbours(self, x, y):
        """Map of (x, y) -> snake variable for the 4-connected neighbour cells."""
        result = {}
        for (dx, dy) in ((-1, 0), (1, 0), (0, -1), (0, 1)):
            s = self.snake(x + dx, y + dy)
            if s is not None:
                result[x + dx, y + dy] = s
        return result
    def surrounding_cells(self, x, y):
        """Map of (x, y) -> snake variable for the 8-connected neighbour cells."""
        result = {}
        for (dx, dy) in ((-1, -1), (-1, 0), (-1, 1), (0, -1), (0, 1), (1, -1), (1, 0), (1, 1)):
            s = self.snake(x + dx, y + dy)
            if s is not None:
                result[x + dx, y + dy] = s
        return result
    def coords(self):
        """Yield every (x, y) coordinate of the grid, row by row."""
        for y in range(self.height):
            for x in range(self.width):
                yield x, y
| from collections import namedtuple
from z3 import Solver, Int, sat, Not, And, Bool
Empty = namedtuple("Empty", [])
HeadMark = namedtuple("HeadMark", ["distance"])
TailMark = namedtuple("TailMark", ["distance"])
Snake = namedtuple("Snake", [])
Head = namedtuple("Head", [])
Tail = namedtuple("Tail", [])
Unknown = namedtuple("Unknown", [])
class Grid:
    """Z3-backed model of a snake puzzle grid.

    The ASCII grid uses "." for cells that may hold part of the snake,
    "H" for head-distance markers and "T" for tail-distance markers.
    Solver variables encode the head/tail coordinates, a boolean per "."
    cell (snake present?) and an integer distance per marker cell.
    """
    def __init__(self, lines):
        self.grid = {}
        for y, line in enumerate(lines):
            for x, char in enumerate(line):
                self.grid[x, y] = char
        self.height = len(lines)
        self.width = len(lines[0])
        self.solver = Solver()
        self.model = None
        # Set up variables
        self.vars = {}
        # Head location
        self["hx"] = Int("hx")
        self["hy"] = Int("hy")
        # Tail location
        self["tx"] = Int("tx")
        self["ty"] = Int("ty")
        # Cells with a head or tail marker
        for y in range(self.height):
            for x in range(self.width):
                if self.grid[x, y] == "H":
                    v = "head_{}_{}".format(x, y)
                    self[v] = Int(v)
                if self.grid[x, y] == "T":
                    v = "tail_{}_{}".format(x, y)
                    self[v] = Int(v)
        # Cells with a potential snake
        for y in range(self.height):
            for x in range(self.width):
                if self.grid[x, y] == ".":
                    v = "snake_{}_{}".format(x, y)
                    self[v] = Bool(v)
    def __setitem__(self, item, var):
        """Register solver variable *var* under the name *item* (once only)."""
        if item in self.vars:
            raise Exception("can't insert a variable twice")
        self.vars[item] = var
    def __getitem__(self, item):
        """Look up a previously registered solver variable by name."""
        return self.vars[item]
    def __call__(self, x, y):
        """Interpret cell (x, y) under the current model.

        Returns Head/Tail/Snake/Empty for "." cells, HeadMark/TailMark for
        marker cells, or Unknown when the model has no concrete value yet.
        """
        c = self.grid[x, y]
        if c == ".":
            hx, hy = self.head()
            if x == self.eval(hx) and y == self.eval(hy):
                return Head()
            tx, ty = self.tail()
            if x == self.eval(tx) and y == self.eval(ty):
                return Tail()
            s = self.snake(x, y)
            try:
                if self.eval(s):
                    return Snake()
                return Empty()
            except Exception:
                # Narrowed from a bare ``except:`` so SystemExit and
                # KeyboardInterrupt are no longer swallowed; an evaluation
                # failure just means the model has no value for this cell.
                return Unknown()
        elif c == "H":
            return HeadMark(self.eval(self["head_{}_{}".format(x, y)]))
        elif c == "T":
            return TailMark(self.eval(self["tail_{}_{}".format(x, y)]))
        else:
            raise ValueError("unknown value at {}, {}".format(x, y))
    def solve(self):
        """Find a first or next solution"""
        if self.model is not None:
            # We have a potential solution. Collect up the values assigned
            # to each variable in it. This gives us, eg, [hx == 0, hy == 0]
            assignments = [var == self.eval(var) for (_, var) in self.vars.items()]
            # Now, add the negation of the conjunction of these. In other words,
            # explicitly rule out this particular solution
            self.add(Not(And(*assignments)))
        if self.solver.check() != sat:
            return False
        self.model = self.solver.model()
        return True
    def eval(self, var):
        """Evaluate *var* in the current model (valid only after solve())."""
        return self.model.eval(var)
    def add(self, constraint):
        """Add a constraint to be satisfied"""
        self.solver.add(constraint)
    def head(self):
        """Return the (x, y) solver variables for the snake's head."""
        return self["hx"], self["hy"]
    def tail(self):
        """Return the (x, y) solver variables for the snake's tail."""
        return self["tx"], self["ty"]
    def snake_possible(self, x, y):
        """True when cell (x, y) may contain part of the snake."""
        return self.grid[x, y] == "."
    def head_mark(self, x, y):
        """Distance variable for an "H" marker at (x, y), else None."""
        if self.grid[x, y] == "H":
            return self["head_{}_{}".format(x, y)]
        return None
    def tail_mark(self, x, y):
        """Distance variable for a "T" marker at (x, y), else None."""
        if self.grid[x, y] == "T":
            return self["tail_{}_{}".format(x, y)]
        return None
    def snake(self, x, y):
        """Snake boolean for cell (x, y); None off-grid or on marker cells."""
        if self.grid.get((x, y)) == ".":
            return self["snake_{}_{}".format(x, y)]
        return None
    def orthogonal_neighbours(self, x, y):
        """Map of the 4-neighbour cells of (x, y) to their snake booleans."""
        result = {}
        for (dx, dy) in ((-1, 0), (1, 0), (0, -1), (0, 1)):
            s = self.snake(x + dx, y + dy)
            if s is not None:
                result[x + dx, y + dy] = s
        return result
    def surrounding_cells(self, x, y):
        """Map of the 8-neighbour cells of (x, y) to their snake booleans."""
        result = {}
        for (dx, dy) in ((-1, -1), (-1, 0), (-1, 1), (0, -1), (0, 1), (1, -1), (1, 0), (1, 1)):
            s = self.snake(x + dx, y + dy)
            if s is not None:
                result[x + dx, y + dy] = s
        return result
    def coords(self):
        """Yield every (x, y) coordinate of the grid, row by row."""
        for y in range(self.height):
            for x in range(self.width):
                yield x, y
| en | 0.817079 | # Set up variables # Head location # Tail location # Cells with a head or tail marker # Cells with a potential snake Find a first or next solution # We hae a potential solution. Collect up the values assigned to each variable in it # This gives us, eg, [hx == 0, hy == 0] # Now, add the negation of the conjunction of these. In other words, explicitly rule out # this particular solution Add a constraint to be satisfied | 3.277879 | 3 |
sciencebeam_gym/trainer/util.py | elifesciences/sciencebeam-gym | 25 | 6619064 | <gh_stars>10-100
# partially copied from tensorflow example project
import logging
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import ops # pylint: disable=E0611
from tensorflow.python.client.session import Session # pylint: disable=E0611
from tensorflow.python.training.saver import get_checkpoint_state # pylint: disable=E0611
def get_logger():
    """Return the logger associated with this module's name."""
    return logging.getLogger(__name__)
class CustomSessionManager(object):
    """Session manager that runs a caller-supplied init function on a fresh
    session and then optionally restores the latest checkpoint."""
    def __init__(self, session_init_fn, graph=None):
        # session_init_fn(session) is invoked right after session creation.
        self._session_init_fn = session_init_fn
        if graph is None:
            graph = ops.get_default_graph()
        self._graph = graph
    def prepare_session(self, master, checkpoint_dir=None, saver=None, config=None, **_):
        """Create a session on *master*, initialise it via the init function,
        and restore the newest checkpoint when both *saver* and
        *checkpoint_dir* are provided. Extra keyword args are ignored."""
        logger = get_logger()
        logger.info('prepare_session')
        session = Session(master, graph=self._graph, config=config)
        # Initialise variables/model state before any checkpoint restore so
        # restored values overwrite the fresh initialisation.
        self._session_init_fn(session)
        if saver and checkpoint_dir:
            ckpt = get_checkpoint_state(checkpoint_dir)
            if ckpt and ckpt.model_checkpoint_path: # pylint: disable=no-member
                logger.info('restoring from %s',
                            ckpt.model_checkpoint_path) # pylint: disable=no-member
                saver.restore(session, ckpt.model_checkpoint_path) # pylint: disable=no-member
                # Let the saver track the existing checkpoint files so its
                # rotation logic continues from the restored state.
                saver.recover_last_checkpoints(
                    ckpt.all_model_checkpoint_paths) # pylint: disable=no-member
            else:
                logger.info('no valid checkpoint in %s', checkpoint_dir)
        return session
class CustomSupervisor(tf.train.Supervisor):
    """tf.train.Supervisor that delegates session setup to
    CustomSessionManager so the model gets a custom initialisation hook."""
    def __init__(self, model, graph, init_op=None, ready_op=None, save_model_secs=0, **kwargs):
        with graph.as_default():
            # NOTE(review): the incoming init_op argument is unconditionally
            # replaced here, so callers cannot supply their own — confirm
            # this is intentional.
            init_op = tf.global_variables_initializer()
        def custom_init(session):
            # Run variable initialisation plus model-specific setup once a
            # session exists; returning True signals a ready session.
            logging.info('initializing, session: %s', session)
            session.run(init_op)
            model.initialize(session)
            return True
        session_manager = CustomSessionManager(
            session_init_fn=custom_init,
            graph=graph
        )
        super().__init__(
            session_manager=session_manager,
            graph=graph,
            init_op=init_op,
            ready_op=ready_op,
            save_model_secs=save_model_secs,
            **kwargs
        )
class SimpleStepScheduler(object):
    """
    Rather than using threads, with this scheduler the client has full control.
    For example it can be triggered any time intentionally or at the end.
    """
    def __init__(self, do_fn, min_interval, min_freq=0, step=0, last_run=None):
        # Callback fired whenever the schedule decides it is due.
        self.do_fn = do_fn
        self.min_interval = min_interval
        self.min_freq = min_freq
        self.current_step = step
        self.last_run = last_run
        # True while a step has been taken without the callback firing.
        self.dirty = False

    def run_now(self, now):
        """Invoke the callback immediately and mark the scheduler clean."""
        self.do_fn()
        self.last_run = now
        self.dirty = False

    def should_trigger(self, now):
        """Decide whether the callback is due, by step count or elapsed time."""
        due_by_count = self.min_freq > 0 and self.current_step % self.min_freq == 0
        due_by_time = self.min_interval > 0 and (
            self.last_run is None or (now - self.last_run) >= self.min_interval
        )
        result = due_by_count or due_by_time
        if result:
            get_logger().info(
                'should_trigger: current_step:%s, min_freq=%s, now=%s, '
                'last_run=%s, min_interval=%s, result=%s',
                self.current_step, self.min_freq, now,
                self.last_run, self.min_interval, result
            )
        return result

    def step(self, now):
        """Advance one step; fire if due, otherwise remember pending work."""
        self.current_step += 1
        if self.should_trigger(now=now):
            self.run_now(now=now)
        else:
            self.dirty = True

    def flush(self, now):
        """Fire the callback if any step happened since the last run."""
        if self.dirty:
            self.run_now(now)
def loss(loss_value):
    """Calculates aggregated mean loss.

    Returns a pair ``([update_ops], mean_op)``: running the update ops adds
    *loss_value* to a running total and bumps a counter; evaluating
    ``mean_op`` yields total / count.
    """
    # Non-trainable accumulator variables (second positional arg is trainable).
    total_loss = tf.Variable(0.0, False)
    loss_count = tf.Variable(0, False)
    total_loss_update = tf.assign_add(total_loss, loss_value)
    loss_count_update = tf.assign_add(loss_count, 1)
    # Cast the int counter so the division produces a float mean.
    loss_op = total_loss / tf.cast(loss_count, tf.float32)
    return [total_loss_update, loss_count_update], loss_op
def get_graph_size():
    """Return the total size in bytes of all global variables in the graph.

    Each variable contributes element-count * per-element byte size.
    """
    # np.prod replaces np.product, which is deprecated and removed in
    # NumPy 2.0; a generator avoids building an intermediate list.
    return sum(
        int(np.prod(v.get_shape().as_list()) * v.dtype.size)
        for v in tf.global_variables()
    )
| # partially copied from tensorflow example project
import logging
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import ops # pylint: disable=E0611
from tensorflow.python.client.session import Session # pylint: disable=E0611
from tensorflow.python.training.saver import get_checkpoint_state # pylint: disable=E0611
def get_logger():
return logging.getLogger(__name__)
class CustomSessionManager(object):
def __init__(self, session_init_fn, graph=None):
self._session_init_fn = session_init_fn
if graph is None:
graph = ops.get_default_graph()
self._graph = graph
def prepare_session(self, master, checkpoint_dir=None, saver=None, config=None, **_):
logger = get_logger()
logger.info('prepare_session')
session = Session(master, graph=self._graph, config=config)
self._session_init_fn(session)
if saver and checkpoint_dir:
ckpt = get_checkpoint_state(checkpoint_dir)
if ckpt and ckpt.model_checkpoint_path: # pylint: disable=no-member
logger.info('restoring from %s',
ckpt.model_checkpoint_path) # pylint: disable=no-member
saver.restore(session, ckpt.model_checkpoint_path) # pylint: disable=no-member
saver.recover_last_checkpoints(
ckpt.all_model_checkpoint_paths) # pylint: disable=no-member
else:
logger.info('no valid checkpoint in %s', checkpoint_dir)
return session
class CustomSupervisor(tf.train.Supervisor):
def __init__(self, model, graph, init_op=None, ready_op=None, save_model_secs=0, **kwargs):
with graph.as_default():
init_op = tf.global_variables_initializer()
def custom_init(session):
logging.info('initializing, session: %s', session)
session.run(init_op)
model.initialize(session)
return True
session_manager = CustomSessionManager(
session_init_fn=custom_init,
graph=graph
)
super().__init__(
session_manager=session_manager,
graph=graph,
init_op=init_op,
ready_op=ready_op,
save_model_secs=save_model_secs,
**kwargs
)
class SimpleStepScheduler(object):
"""
Rather than using threads, with this scheduler the client has full control.
For example it can be triggered any time intentionally or at the end.
"""
def __init__(self, do_fn, min_interval, min_freq=0, step=0, last_run=None):
self.do_fn = do_fn
self.min_interval = min_interval
self.min_freq = min_freq
self.current_step = step
self.last_run = last_run
self.dirty = False
def run_now(self, now):
self.do_fn()
self.last_run = now
self.dirty = False
def should_trigger(self, now):
result = (
(
(self.min_freq > 0) and
(self.current_step % self.min_freq == 0)
) or
(
(self.min_interval > 0) and
(self.last_run is None or (now - self.last_run) >= self.min_interval)
)
)
if result:
get_logger().info(
'should_trigger: current_step:%s, min_freq=%s, now=%s, '
'last_run=%s, min_interval=%s, result=%s',
self.current_step, self.min_freq, now,
self.last_run, self.min_interval, result
)
return result
def step(self, now):
self.current_step += 1
if self.should_trigger(now=now):
self.run_now(now=now)
else:
self.dirty = True
def flush(self, now):
if self.dirty:
self.run_now(now)
def loss(loss_value):
"""Calculates aggregated mean loss."""
total_loss = tf.Variable(0.0, False)
loss_count = tf.Variable(0, False)
total_loss_update = tf.assign_add(total_loss, loss_value)
loss_count_update = tf.assign_add(loss_count, 1)
loss_op = total_loss / tf.cast(loss_count, tf.float32)
return [total_loss_update, loss_count_update], loss_op
def get_graph_size():
return sum([
int(np.product(v.get_shape().as_list()) * v.dtype.size)
for v in tf.global_variables()
]) | en | 0.762023 | # partially copied from tensorflow example project # pylint: disable=E0611 # pylint: disable=E0611 # pylint: disable=E0611 # pylint: disable=no-member # pylint: disable=no-member # pylint: disable=no-member # pylint: disable=no-member Rather than using threads, with this scheduler the client has full control. For example it can be triggered any time intentionally or at the end. Calculates aggregated mean loss. | 2.194747 | 2 |
nanosound_oled/nanosoundcd_status.py | michaelpeeters/Nanomesher_NanoSound | 21 | 6619065 | <filename>nanosound_oled/nanosoundcd_status.py
from pymemcache.client.base import Client
from pymemcache import serde
from datetime import datetime
import os
last_report = None
last_ripping = False
def is_nanosoundcd_installed():
    """Report whether the NanoSound CD software directory is present."""
    install_dir = "/home/volumio/nanomesher_nanosoundcd"
    return os.path.isdir(install_dir)
def to_display():
    """Return display info for rip progress, or None when nothing changed.

    Returns [True, album] when a new rip progress report arrived,
    [False, album-or-None] exactly once when ripping just stopped, and
    None otherwise. Tracks state in the module globals last_report and
    last_ripping.
    """
    global last_report, last_ripping
    res = is_rip_on_going()
    if res[0]:
        if last_report != res[2]:
            # New progress report: remember it and show the album name.
            last_report = res[2]
            last_ripping = True
            return [True, res[1]]
        last_ripping = True
        return None
    if last_ripping:
        # Was ripping but now stopped: report once. is_rip_on_going()
        # returns the one-element [False] when idle, so guard the album
        # lookup (the original indexed res[1] here and raised IndexError).
        last_ripping = False
        return [False, res[1] if len(res) > 1 else None]
    last_ripping = False
    return None
return None
def is_rip_on_going():
    """Check memcache for a live CD-rip progress report.

    Returns [True, album_name, rip_start] while a rip is active (its report
    was updated within the last 20 seconds), otherwise [False].
    """
    client = Client(('localhost', 11211), serializer=serde.python_memcache_serializer,
    deserializer=serde.python_memcache_deserializer)
    ripinfodict = client.get('ripprogress')
    # The Python 2 `<>` operator was a SyntaxError on Python 3; also return
    # [False] instead of falling through to None (callers index result[0]).
    if ripinfodict is None or 'riplastupdate' not in ripinfodict:
        return [False]
    riplastupdate = datetime.strptime(ripinfodict['riplastupdate'], "%Y-%m-%d %H:%M:%S")
    # total_seconds() rather than .seconds: .seconds ignores whole days, so
    # a stale day-old report could look fresh.
    if (datetime.now() - riplastupdate).total_seconds() > 20:
        return [False]
    if 'albumname' in ripinfodict:
        return [True, ripinfodict["albumname"], ripinfodict["ripstart"]]
    return [True, "via NanoSound CD", ripinfodict["ripstart"]]
| <filename>nanosound_oled/nanosoundcd_status.py
from pymemcache.client.base import Client
from pymemcache import serde
from datetime import datetime
import os
last_report = None
last_ripping = False
def is_nanosoundcd_installed():
return os.path.isdir("/home/volumio/nanomesher_nanosoundcd")
def to_display():
global last_report, last_ripping
res = is_rip_on_going()
if (res[0] == True):
if(last_report != res[2]):
last_report = res[2]
last_ripping = True
return [True,res[1]]
else:
last_ripping = True
return None
else:
if(last_ripping==True):
#was ripping but now stopped
last_ripping = False
return [False,res[1]]
else:
last_ripping = False
return None
def is_rip_on_going():
client = Client(('localhost', 11211), serializer=serde.python_memcache_serializer,
deserializer=serde.python_memcache_deserializer)
ripinfodict = client.get('ripprogress')
if (ripinfodict <> None):
if ('riplastupdate' in ripinfodict):
riplastupdate = datetime.strptime(ripinfodict['riplastupdate'], "%Y-%m-%d %H:%M:%S")
if ((((datetime.now() - riplastupdate).seconds) > 20)):
return [False]
elif ((((datetime.now() - riplastupdate).seconds) <= 20)):
if ('albumname' in ripinfodict):
return [True, ripinfodict["albumname"], ripinfodict["ripstart"]]
else:
return [True, "via NanoSound CD", ripinfodict["ripstart"]]
else:
return [False]
else:
return [False]
| en | 0.92509 | #was ripping but now stopped | 2.423831 | 2 |
src/graph.py | artemlunev2000/Graph-database | 0 | 6619066 | <reponame>artemlunev2000/Graph-database
from pyformlang.regular_expression import Regex
from pygraphblas import Matrix, BOOL, Vector
class Graph:
    """Directed labelled graph backed by one boolean adjacency matrix per label.

    Built either from an edge-list file or from a regular expression (via its
    minimal DFA); supports intersection (tensor/Kronecker product) and
    transitive closure using GraphBLAS matrix operations.
    """
    def __init__(self):
        self.size = 0  # matrix dimension (max vertex id + 1)
        self.vertices = set()
        self.start_vertices = set()
        self.final_vertices = set()
        self.labels = set()
        self.projection_matrices = dict()  # label -> boolean adjacency Matrix
    def read_graph(self, path):
        """Load a graph from a file of "src label dst" lines.

        Every vertex becomes both a start and a final vertex.
        """
        with open(path, 'r') as graph_file:
            lines = graph_file.readlines()
            # First pass: collect vertices and the required matrix dimension.
            for line in lines:
                edge = line.split(" ")
                v_from = int(edge[0])
                v_to = int(edge[2])
                self.vertices.add(v_from)
                self.vertices.add(v_to)
                self.size = max(self.size, v_from + 1, v_to + 1)
            # Second pass: fill one boolean adjacency matrix per label.
            for line in lines:
                edge = line.split(" ")
                v_from = int(edge[0])
                v_to = int(edge[2])
                label = edge[1]
                if label not in self.labels:
                    self.labels.add(label)
                    self.projection_matrices[label] = Matrix.sparse(BOOL, self.size, self.size)
                self.projection_matrices[label][v_from, v_to] = True
        self.start_vertices = self.vertices
        self.final_vertices = self.vertices
    def read_regex(self, path):
        """Build the graph from the minimal DFA of a regex read from *path*."""
        with open(path, 'r') as regex_file:
            regex = regex_file.readline().rstrip()
        dfa = Regex(regex).to_epsilon_nfa().to_deterministic().minimize()
        self.size = len(dfa.states)
        # Number DFA states consecutively so they can index matrices.
        state_number = 0
        state_number_dict = dict()
        for state in dfa.states:
            state_number_dict[state] = state_number
            self.vertices.add(state_number)
            state_number += 1
        # NOTE(review): relies on pyformlang's private _transition_function
        # attribute — may break across library versions.
        for v_from, label, v_to in dfa._transition_function.get_edges():
            if label not in self.labels:
                self.projection_matrices[label] = Matrix.sparse(BOOL, self.size, self.size)
                self.labels.add(label)
            self.projection_matrices[label][state_number_dict[v_from], state_number_dict[v_to]] = True
        for state in dfa.start_states:
            self.start_vertices.add(state_number_dict[state])
        for state in dfa.final_states:
            self.final_vertices.add(state_number_dict[state])
    def intersect(self, graph):
        """Return the tensor-product intersection of this graph with *graph*.

        Pair (st1, st2) maps to vertex st1 * graph.size + st2, matching the
        row/column ordering of the Kronecker product below.
        """
        intersection = Graph()
        intersection.size = self.size * graph.size
        # Only labels present in both graphs survive the intersection.
        for label in self.labels:
            if label in graph.labels:
                intersection.labels.add(label)
                intersection.projection_matrices[label] = self.projection_matrices[label].kronecker(
                    graph.projection_matrices[label])
        for st1 in self.vertices:
            for st2 in graph.vertices:
                intersection_state = st1 * graph.size + st2
                intersection.vertices.add(intersection_state)
                if st1 in self.start_vertices and st2 in graph.start_vertices:
                    intersection.start_vertices.add(intersection_state)
                if st1 in self.final_vertices and st2 in graph.final_vertices:
                    intersection.final_vertices.add(intersection_state)
        return intersection
    def transitive_closure_square(self):
        """Transitive closure over all labels via repeated squaring."""
        closure = Matrix.sparse(BOOL, self.size, self.size)
        for label in self.labels:
            closure += self.projection_matrices[label]
        prev_nvals = -1
        # Iterate until the number of stored values stops growing.
        while prev_nvals != closure.nvals:
            prev_nvals = closure.nvals
            closure += closure @ closure
        return closure
    def transitive_closure_mul(self):
        """Transitive closure via repeated multiplication by the base matrix."""
        closure = Matrix.sparse(BOOL, self.size, self.size)
        for label in self.labels:
            closure += self.projection_matrices[label]
        dup = closure.dup()
        prev_nvals = -1
        while prev_nvals != closure.nvals:
            prev_nvals = closure.nvals
            closure += dup @ closure
        return closure
    def reachable_with_start_states(self, start):
        """Closure restricted to pairs whose source is in *start*."""
        result = self.transitive_closure_square()
        # Zero out rows for vertices that are not allowed as sources.
        for state in range(self.size):
            if state not in start:
                result.assign_row(state, Vector.sparse(BOOL, self.size))
        return result
    def reachable_with_start_and_final_states(self, start, final):
        """Closure restricted to sources in *start* and targets in *final*."""
        result = self.transitive_closure_square()
        for state in range(self.size):
            if state not in start:
                result.assign_row(state, Vector.sparse(BOOL, self.size))
            if state not in final:
                result.assign_col(state, Vector.sparse(BOOL, self.size))
        return result
| from pyformlang.regular_expression import Regex
from pygraphblas import Matrix, BOOL, Vector
class Graph:
def __init__(self):
self.size = 0
self.vertices = set()
self.start_vertices = set()
self.final_vertices = set()
self.labels = set()
self.projection_matrices = dict()
def read_graph(self, path):
with open(path, 'r') as graph_file:
lines = graph_file.readlines()
for line in lines:
edge = line.split(" ")
v_from = int(edge[0])
v_to = int(edge[2])
self.vertices.add(v_from)
self.vertices.add(v_to)
self.size = max(self.size, v_from + 1, v_to + 1)
for line in lines:
edge = line.split(" ")
v_from = int(edge[0])
v_to = int(edge[2])
label = edge[1]
if label not in self.labels:
self.labels.add(label)
self.projection_matrices[label] = Matrix.sparse(BOOL, self.size, self.size)
self.projection_matrices[label][v_from, v_to] = True
self.start_vertices = self.vertices
self.final_vertices = self.vertices
def read_regex(self, path):
with open(path, 'r') as regex_file:
regex = regex_file.readline().rstrip()
dfa = Regex(regex).to_epsilon_nfa().to_deterministic().minimize()
self.size = len(dfa.states)
state_number = 0
state_number_dict = dict()
for state in dfa.states:
state_number_dict[state] = state_number
self.vertices.add(state_number)
state_number += 1
for v_from, label, v_to in dfa._transition_function.get_edges():
if label not in self.labels:
self.projection_matrices[label] = Matrix.sparse(BOOL, self.size, self.size)
self.labels.add(label)
self.projection_matrices[label][state_number_dict[v_from], state_number_dict[v_to]] = True
for state in dfa.start_states:
self.start_vertices.add(state_number_dict[state])
for state in dfa.final_states:
self.final_vertices.add(state_number_dict[state])
def intersect(self, graph):
intersection = Graph()
intersection.size = self.size * graph.size
for label in self.labels:
if label in graph.labels:
intersection.labels.add(label)
intersection.projection_matrices[label] = self.projection_matrices[label].kronecker(
graph.projection_matrices[label])
for st1 in self.vertices:
for st2 in graph.vertices:
intersection_state = st1 * graph.size + st2
intersection.vertices.add(intersection_state)
if st1 in self.start_vertices and st2 in graph.start_vertices:
intersection.start_vertices.add(intersection_state)
if st1 in self.final_vertices and st2 in graph.final_vertices:
intersection.final_vertices.add(intersection_state)
return intersection
def transitive_closure_square(self):
closure = Matrix.sparse(BOOL, self.size, self.size)
for label in self.labels:
closure += self.projection_matrices[label]
prev_nvals = -1
while prev_nvals != closure.nvals:
prev_nvals = closure.nvals
closure += closure @ closure
return closure
def transitive_closure_mul(self):
closure = Matrix.sparse(BOOL, self.size, self.size)
for label in self.labels:
closure += self.projection_matrices[label]
dup = closure.dup()
prev_nvals = -1
while prev_nvals != closure.nvals:
prev_nvals = closure.nvals
closure += dup @ closure
return closure
def reachable_with_start_states(self, start):
result = self.transitive_closure_square()
for state in range(self.size):
if state not in start:
result.assign_row(state, Vector.sparse(BOOL, self.size))
return result
def reachable_with_start_and_final_states(self, start, final):
result = self.transitive_closure_square()
for state in range(self.size):
if state not in start:
result.assign_row(state, Vector.sparse(BOOL, self.size))
if state not in final:
result.assign_col(state, Vector.sparse(BOOL, self.size))
return result | none | 1 | 2.737684 | 3 | |
DeductExpenditureScreenStringsGerman.py | TauOmicronMu/Y13Computing | 0 | 6619067 | <reponame>TauOmicronMu/Y13Computing<gh_stars>0
# -*- coding: utf-8 -*-
# German UI strings for the "Deduct Expenditure" screen
# (DeductExpenditureScreenStringsGerman).
# Help text shown on the deduct-expenditure screen.
DEDUCT_EXPENDITURE_HELP_TEXT = u"Hier können Sie die Ausgaben bei den Gesamtausgaben abziehen."
# Prompt for the administrator password.
ENTER_ADMIN_PASS_TEXT = u"Administrator-Kennwort: "
# Prompt for the expenditure amount to deduct.
EXPENDITURE_ONE_TEXT = u"Geben Sie Ausgaben abziehen: "
# Prompt for the expenditure amount.
EXPENDITURE_TWO_TEXT = u"Geben Sie Ausgaben: "
# Label for the submit button.
SUBMIT_BUTTON_TEXT = u"Senden"
| # -*- coding: utf-8 -*-
#DeductExpenditureScreenStringsGerman
DEDUCT_EXPENDITURE_HELP_TEXT = u"Hier können Sie die Ausgaben bei den Gesamtausgaben abziehen."
ENTER_ADMIN_PASS_TEXT = u"Administrator-Kennwort: "
EXPENDITURE_ONE_TEXT = u"Geben Sie Ausgaben abziehen: "
EXPENDITURE_TWO_TEXT = u"Geben Sie Ausgaben: "
SUBMIT_BUTTON_TEXT = u"Senden" | en | 0.699284 | # -*- coding: utf-8 -*- #DeductExpenditureScreenStringsGerman | 1.096887 | 1 |
src/coop_assembly/__init__.py | createchaos/coop_assembly | 3 | 6619068 | """
*************
coop_assembly
*************
.. currentmodule:: coop_assembly
.. toctree::
:maxdepth: 2
coop_assembly.data_structure
coop_assembly.geometry_generation
coop_assembly.help_functions
"""
# coop_assembly.assembly_info_generation
# coop_assembly.choreo_interface
from __future__ import print_function
from .__version__ import __author__, __author_email__, __copyright__, \
__description__, __license__, __title__, __url__, __version__
import os
HERE = os.path.dirname(__file__)
DATA = os.path.abspath(os.path.join(HERE, 'data'))
def _find_resource(filename):
filename = filename.strip('/')
return os.path.abspath(os.path.join(DATA, filename))
def get_data(filename):
return _find_resource(filename)
__all__ = [
'__author__', '__author_email__', '__copyright__', '__description__',
'__license__', '__title__', '__url__', '__version__', 'get_data',
]
| """
*************
coop_assembly
*************
.. currentmodule:: coop_assembly
.. toctree::
:maxdepth: 2
coop_assembly.data_structure
coop_assembly.geometry_generation
coop_assembly.help_functions
"""
# coop_assembly.assembly_info_generation
# coop_assembly.choreo_interface
from __future__ import print_function
from .__version__ import __author__, __author_email__, __copyright__, \
__description__, __license__, __title__, __url__, __version__
import os
HERE = os.path.dirname(__file__)
DATA = os.path.abspath(os.path.join(HERE, 'data'))
def _find_resource(filename):
filename = filename.strip('/')
return os.path.abspath(os.path.join(DATA, filename))
def get_data(filename):
return _find_resource(filename)
__all__ = [
'__author__', '__author_email__', '__copyright__', '__description__',
'__license__', '__title__', '__url__', '__version__', 'get_data',
]
| en | 0.342959 | ************* coop_assembly ************* .. currentmodule:: coop_assembly .. toctree:: :maxdepth: 2 coop_assembly.data_structure coop_assembly.geometry_generation coop_assembly.help_functions # coop_assembly.assembly_info_generation # coop_assembly.choreo_interface | 1.654006 | 2 |
EstruturaDeDecisao/exercicio10.py | Nicolas-Wursthorn/exercicios-python-brasil | 0 | 6619069 | # Faça um Programa que pergunte em que turno você estuda. Peça para digitar M-matutino ou v-Vespertino ou n-Noturno. Imprima a mensagem "Bom dia", "Boa Tarde" ou "Boa Noite" ou "Valor Inválido!" conforme o caso.
turno = input("Em qual turno você estuda?\n M - Matutino, V - Vespertino ou N - Noturno: ")
if turno == "M":
print("Bom Dia!")
elif turno == "V":
print("Boa Tarde!")
elif turno == "N":
print("Boa Noite!")
else:
print("Valor Inválido!") | # Faça um Programa que pergunte em que turno você estuda. Peça para digitar M-matutino ou v-Vespertino ou n-Noturno. Imprima a mensagem "Bom dia", "Boa Tarde" ou "Boa Noite" ou "Valor Inválido!" conforme o caso.
turno = input("Em qual turno você estuda?\n M - Matutino, V - Vespertino ou N - Noturno: ")
if turno == "M":
print("Bom Dia!")
elif turno == "V":
print("Boa Tarde!")
elif turno == "N":
print("Boa Noite!")
else:
print("Valor Inválido!") | pt | 0.967836 | # Faça um Programa que pergunte em que turno você estuda. Peça para digitar M-matutino ou v-Vespertino ou n-Noturno. Imprima a mensagem "Bom dia", "Boa Tarde" ou "Boa Noite" ou "Valor Inválido!" conforme o caso. | 3.947404 | 4 |
entity2uml/drawer.py | guchengxi1994/dev-tool-for-python | 0 | 6619070 | """
shapes can be found here: http://www.graphviz.org/doc/info/shapes.html
"""
# Graphviz node shape used for each ER-diagram element kind; shape names are
# from http://www.graphviz.org/doc/info/shapes.html
__shapes__ = {
    'entity':"rectangle",
    'relation':"diamond",
    'attributes':'ellipse'
}
# Supported output image formats.
__formats__ = [
    "jpg",
    "bmp",
    "png"
]
# map <str,style> where style is a graphviz line style, see
# http://www.graphviz.org/doc/info/colors.html
__relations__ = {
    "one2one":{'color':'red'},
    "many2one":{'color':'green'},
    "many2many":{'color':'blue'},
    "one2many":{'color':'gold'},
    "extend":{'color':'red'}
}
# Available Graphviz layout engines.
__engines__ = [
    "dot",# default layout engine, mainly for directed graphs
    "neato", # mainly for undirected graphs
    "twopi" ,# radial layouts
    "circo" ,# circular layouts
    "fdp",# mainly for undirected graphs
    "sfdp" , # for drawing larger undirected graphs
    "patchwork",# for tree maps
]
from typing import TypeVar
# A diagram source is constrained to a class, a tuple, or a list.
Diagram = TypeVar('Diagram',type,tuple,list)
# Presumably delimiters for Graphviz record labels — TODO confirm against
# the drawing code that consumes them.
__prefix__ = "{"
__suffix__ = "}"
__sep__ = "|{}" | """
shapes can be found here: http://www.graphviz.org/doc/info/shapes.html
"""
__shapes__ = {
'entity':"rectangle",
'relation':"diamond",
'attributes':'ellipse'
}
__formats__ = [
"jpg",
"bmp",
"png"
]
# map <str,style> style is graphviz line style
# http://www.graphviz.org/doc/info/colors.html
__relations__ = {
"one2one":{'color':'red'},
"many2one":{'color':'green'},
"many2many":{'color':'blue'},
"one2many":{'color':'gold'},
"extend":{'color':'red'}
}
__engines__ = [
"dot",# 默认布局方式,主要用于有向图
"neato", # 主要用于无向图
"twopi" ,# 主要用于径向布局
"circo" ,# 圆环布局
"fdp",# 主要用于无向图
"sfdp" , # 主要绘制较大的无向图
"patchwork",# 主要用于树哈希图(tree map)
]
from typing import TypeVar
Diagram = TypeVar('Diagram',type,tuple,list)
__prefix__ = "{"
__suffix__ = "}"
__sep__ = "|{}" | zh | 0.811051 | shapes can be found here: http://www.graphviz.org/doc/info/shapes.html # map <str,style> style is graphviz line style # http://www.graphviz.org/doc/info/colors.html # 默认布局方式,主要用于有向图 # 主要用于无向图 # 主要用于径向布局 # 圆环布局 # 主要用于无向图 # 主要绘制较大的无向图 # 主要用于树哈希图(tree map) | 2.681891 | 3 |
chinup/middleware.py | smbapps/chinup | 0 | 6619071 | from __future__ import absolute_import, unicode_literals
import logging
from .lowlevel import batches
from .queue import delete_queues
from .conf import settings
logger = logging.getLogger(__name__)
class ChinupMiddleware(object):
def process_request(self, request):
delete_queues()
def process_response(self, request, response):
if settings.DEBUG and batches:
logger.info("%d requests in %d batches",
sum(len(b) for b in batches),
len(batches))
batches[:] = []
return response
| from __future__ import absolute_import, unicode_literals
import logging
from .lowlevel import batches
from .queue import delete_queues
from .conf import settings
logger = logging.getLogger(__name__)
class ChinupMiddleware(object):
def process_request(self, request):
delete_queues()
def process_response(self, request, response):
if settings.DEBUG and batches:
logger.info("%d requests in %d batches",
sum(len(b) for b in batches),
len(batches))
batches[:] = []
return response
| none | 1 | 2.159363 | 2 | |
day07/DictFunction.py | DreamLose/python- | 1 | 6619072 | # 字典 keys() values() items() get() update()
# Demonstrates dict methods: keys(), values(), items(), get(), update().
# (A stray no-op expression statement `dict` was removed.)
info = {"k1":"v1","k2":"v2"}
print(info)
# keys()/values() return view objects; the results are discarded here.
info.keys()
info.values()
del info["k1"]
print(info)
# Iterate over (key, value) pairs.
for k,y in info.items():
    print(k,y)
# Build a dict from a sequence of keys, all sharing one default value.
v = dict.fromkeys(['ke','323','999'],[123,"ewe"])
print(v)
# get(): look up a key, falling back to a default when it is missing.
s = v.get("ke",23232)
print(s)
# pop(): remove and return a value, or the default when the key is absent.
s = v.pop("ke3",90)
print(s)
# setdefault(): only inserts when the key does not exist yet.
v.setdefault("ke",1212)
print(v)
# update() with a mapping overwrites existing keys and adds new ones.
v.update({'ke':"232323","kk":32323})
print(v)
# update() also accepts keyword arguments.
v.update(ke=1212,k3=33223,kk=323)
print(v)
| # 字典 keys() values() items() get() update()
dict
info = {"k1":"v1","k2":"v2"}
print(info)
info.keys()
info.values()
del info["k1"]
print(info)
# 获取key,value
for k,y in info.items():
print(k,y)
#根据序列创建字典,指定统一值
v = dict.fromkeys(['ke','323','999'],[123,"ewe"])
print(v)
# 根据key 获取值,key不存在时可以指定默认值
s = v.get("ke",23232)
print(s)
# 删除并获取值,没有对应的key返回默认值
s = v.pop("ke3",90)
print(s)
# 设置值,已经存在,不设置并获取当前key对应的值,不存在key则添加对应值并获取
v.setdefault("ke",1212)
print(v)
v.update({'ke':"232323","kk":32323})
print(v)
v.update(ke=1212,k3=33223,kk=323)
print(v)
| zh | 0.888112 | # 字典 keys() values() items() get() update() # 获取key,value #根据序列创建字典,指定统一值 # 根据key 获取值,key不存在时可以指定默认值 # 删除并获取值,没有对应的key返回默认值 # 设置值,已经存在,不设置并获取当前key对应的值,不存在key则添加对应值并获取 | 3.905231 | 4 |
docs/assets/code/rules/use_example_rule_003.py | dotX12/waio | 24 | 6619073 | @dp.message_handler(len_more=12)
async def text_len(message: Message):
await message.answer(f'msg len: {len(message.text)}') | @dp.message_handler(len_more=12)
async def text_len(message: Message):
await message.answer(f'msg len: {len(message.text)}') | none | 1 | 2.337762 | 2 | |
tests/utils.py | aexeagmbh/cfn-lint-rules | 1 | 6619074 | <reponame>aexeagmbh/cfn-lint-rules
from pathlib import Path
from typing import List, Tuple, Type
import cfnlint.decode.cfn_yaml
from cfnlint.core import get_rules
from cfnlint.rules import CloudFormationLintRule
from cfnlint.runner import Runner
ExpectedError = Tuple[int, Type[CloudFormationLintRule], str]
BAD_TEMPLATE_FIXTURES_PATH = Path("tests/bad").resolve()
GOOD_TEMPLATE_FIXTURES_PATH = Path("tests/good").resolve()
def assert_all_matches(
    filename: str, expected_errors: List[ExpectedError], region: str = "us-east-1"
) -> None:
    """Lint *filename* with cfn-lint and assert the matches are exactly
    *expected_errors* (line number, rule class, message) — no more, no less."""
    template = cfnlint.decode.cfn_yaml.load(filename)
    rules = get_rules(
        ["cfn_lint_ax.rules"],
        ignore_rules=[],
        include_rules=["I"],
        include_experimental=True,
    )
    runner = Runner(rules=rules, filename=filename, template=template, regions=[region])
    runner.transform()
    errs = runner.run()
    for line_number, lint_rule_class, message in expected_errors:
        expected_name = f"{lint_rule_class.__module__}.{lint_rule_class.__name__}"
        # Expectations written against the local "rules." package drop that
        # prefix; actual match names are compared as-is.
        if expected_name.startswith("rules."):
            expected_name = expected_name[len("rules.") :]
        found_index = None
        for index, match in enumerate(errs):
            match_cls = match.rule.__class__
            actual_name = f"{match_cls.__module__}.{match_cls.__name__}"
            if (
                match.linenumber == line_number
                and actual_name == expected_name
                and match.message == message
            ):
                found_index = index
                break
        assert found_index is not None, (
            f"{line_number} - {lint_rule_class} - {message} not in errs"
        )
        # Consume the matched error so duplicates must each be expected.
        del errs[found_index]
    assert len(errs) == 0, errs
| from pathlib import Path
from typing import List, Tuple, Type
import cfnlint.decode.cfn_yaml
from cfnlint.core import get_rules
from cfnlint.rules import CloudFormationLintRule
from cfnlint.runner import Runner
# (line number, rule class, exact message) expected from a lint run.
ExpectedError = Tuple[int, Type[CloudFormationLintRule], str]
# Fixture directories: templates that must / must not produce findings.
BAD_TEMPLATE_FIXTURES_PATH = Path("tests/bad").resolve()
GOOD_TEMPLATE_FIXTURES_PATH = Path("tests/good").resolve()
def assert_all_matches(
    filename: str, expected_errors: List[ExpectedError], region: str = "us-east-1"
) -> None:
    """Lint *filename* and assert it yields exactly *expected_errors*.

    Each expected error is matched (and consumed) against one lint match by
    line number, fully-qualified rule class name, and message; any leftover
    matches fail the assertion at the end.
    """
    template = cfnlint.decode.cfn_yaml.load(filename)
    rules = get_rules(
        ["cfn_lint_ax.rules"],
        ignore_rules=[],
        include_rules=["I"],
        include_experimental=True,
    )
    runner = Runner(rules=rules, filename=filename, template=template, regions=[region])
    runner.transform()
    remaining = runner.run()

    def qualified_name(cls: type) -> str:
        # Strip the legacy "rules." package prefix so both import paths compare equal.
        full = f"{cls.__module__}.{cls.__name__}"
        return full[len("rules."):] if full.startswith("rules.") else full

    for line_number, lint_rule_class, message in expected_errors:
        wanted = qualified_name(lint_rule_class)
        for idx, match in enumerate(remaining):
            if (
                match.linenumber == line_number
                and qualified_name(match.rule.__class__) == wanted
                and match.message == message
            ):
                del remaining[idx]
                break
        else:
            assert False, f"{line_number} - {lint_rule_class} - {message} not in errs"
    assert len(remaining) == 0, remaining
tests/test_tapioca_github.py | gtsalles/tapioca-github | 7 | 6619075 | # -*- coding: utf-8 -*-
import unittest
import responses
from tapioca_github import Github
from tapioca_github.resource_mapping import RESOURCE_MAPPING
from tests.fixtures import SINGLE_GIST_PAYLOAD
class TestTapioca(unittest.TestCase):
    """Exercise the Github wrapper against a mocked GitHub API."""

    def setUp(self):
        self.wrapper = Github(access_token='access_token_token')

    def test_resource_access(self):
        gist_id = 1
        url = 'https://api.github.com/' + \
            RESOURCE_MAPPING['gists_single']['resource'].format(id=gist_id)
        # assert_all_requests_are_fired guarantees the wrapper actually hit the URL.
        with responses.RequestsMock(assert_all_requests_are_fired=True) as mock:
            mock.add(responses.GET, url, json=SINGLE_GIST_PAYLOAD, status=201)
            resource = self.wrapper.gists_single(id=gist_id).get()
            self.assertEqual(resource().data, SINGLE_GIST_PAYLOAD)
# Allow running this test module directly with `python`.
if __name__ == '__main__':
    unittest.main()
| # -*- coding: utf-8 -*-
import unittest
import responses
from tapioca_github import Github
from tapioca_github.resource_mapping import RESOURCE_MAPPING
from tests.fixtures import SINGLE_GIST_PAYLOAD
class TestTapioca(unittest.TestCase):
    """Exercise the Github wrapper against a mocked GitHub API."""

    def setUp(self):
        self.wrapper = Github(access_token='access_token_token')

    def test_resource_access(self):
        gist_id = 1
        url = 'https://api.github.com/' + \
            RESOURCE_MAPPING['gists_single']['resource'].format(id=gist_id)
        # assert_all_requests_are_fired guarantees the wrapper actually hit the URL.
        with responses.RequestsMock(assert_all_requests_are_fired=True) as mock:
            mock.add(responses.GET, url, json=SINGLE_GIST_PAYLOAD, status=201)
            resource = self.wrapper.gists_single(id=gist_id).get()
            self.assertEqual(resource().data, SINGLE_GIST_PAYLOAD)
# Allow running this test module directly with `python`.
if __name__ == '__main__':
    unittest.main()
| en | 0.769321 | # -*- coding: utf-8 -*- | 2.489276 | 2 |
setup.py | gisce/apply_pr | 3 | 6619076 | from setuptools import setup, find_packages
with open('requirements.txt', 'r') as f:
INSTALL_REQUIRES = f.readlines()
setup(
name='apply_pr',
version='2.11.3',
packages=find_packages(),
url='https://github.com/gisce/apply_pr',
license='MIT',
author='GISCE-TI, S.L.',
author_email='<EMAIL>',
description='Apply Pull Requests from GitHub',
entry_points='''
[console_scripts]
sastre=apply_pr.cli:sastre
apply_pr=apply_pr.cli:deprecated
''',
install_requires=INSTALL_REQUIRES
)
from setuptools import setup, find_packages

# Read the pinned dependency list.  readlines() alone would hand setuptools
# raw lines with trailing newlines plus any blank/comment lines; strip them.
with open('requirements.txt', 'r', encoding='utf-8') as f:
    INSTALL_REQUIRES = [
        line.strip() for line in f
        if line.strip() and not line.lstrip().startswith('#')
    ]

setup(
    name='apply_pr',
    version='2.11.3',
    packages=find_packages(),
    url='https://github.com/gisce/apply_pr',
    license='MIT',
    author='GISCE-TI, S.L.',
    author_email='<EMAIL>',
    description='Apply Pull Requests from GitHub',
    entry_points='''
        [console_scripts]
        sastre=apply_pr.cli:sastre
        apply_pr=apply_pr.cli:deprecated
    ''',
    install_requires=INSTALL_REQUIRES
)
| ms | 0.113941 | [console_scripts] sastre=apply_pr.cli:sastre apply_pr=apply_pr.cli:deprecated | 1.414892 | 1 |
src/day16.py | blu3r4y/AdventOfCode2018 | 2 | 6619077 | <filename>src/day16.py<gh_stars>1-10
# Advent of Code 2018, Day 16
# (c) blu3r4y
from collections import namedtuple
from parse import parse
# The 16 opcode mnemonics of the device's instruction set.
OPERATIONS = ['addr', 'addi', 'mulr', 'muli', 'banr', 'bani', 'borr', 'bori',
              'setr', 'seti', 'gtir', 'gtri', 'gtrr', 'eqir', 'eqri', 'eqrr']
# One puzzle sample: an instruction plus register state before and after it ran.
Observation = namedtuple("Observation", ["instruction", "before", "after"])
def part1(observations):
    """Count samples whose behavior is consistent with >= 3 opcodes."""
    three_or_more = 0
    for obs in observations:
        # Number of operations that reproduce the observed after-state.
        matches = sum(
            1
            for op in OPERATIONS
            if execute(obs.instruction, obs.before, op) == obs.after
        )
        if matches >= 3:
            three_or_more += 1
    return three_or_more
def part2(observations, program):
    """Deduce the opcode -> operation mapping by constraint propagation,
    then run *program* and return the value left in register 0."""
    # store possible candidates for every opcode
    operations = {i: set(OPERATIONS) for i in range(len(OPERATIONS))}
    for obsv in observations:
        matching_operations = set()
        opcode = obsv.instruction[0]
        # execute all possible candidates
        for op in operations[opcode]:
            if obsv.after == execute(obsv.instruction, obsv.before, op):
                matching_operations.add(op)
        # keep only the matching operations
        operations[opcode] = matching_operations
        # if we uniquely identified an operation ...
        if len(matching_operations) == 1:
            unique_op = next(iter(matching_operations))
            # ... remove it from the other mappings
            for key in set(operations.keys()) - {opcode}:
                operations[key].discard(unique_op)
    # map set values to scalar
    # NOTE(review): assumes propagation fully resolves every opcode to exactly
    # one candidate; otherwise .pop() would silently drop alternatives.
    operations = {i: ops.pop() for i, ops in operations.items()}
    # interpret the program
    reg = [0, 0, 0, 0]
    for instruction in program:
        reg = execute(instruction, reg, operations[instruction[0]])
    return reg[0]
def execute(instruction, reg, op):
    """Apply one named operation to a copy of the registers.

    *instruction* is (opcode, a, b, c); the opcode number itself is ignored
    here because *op* names the operation.  Returns the new register list;
    the caller's registers are never mutated.  An unknown *op* returns an
    unchanged copy.
    """
    _, a, b, c = instruction
    out = list(reg)  # operate on a copy
    dispatch = {
        'addr': lambda: out[a] + out[b],
        'addi': lambda: out[a] + b,
        'mulr': lambda: out[a] * out[b],
        'muli': lambda: out[a] * b,
        'banr': lambda: out[a] & out[b],
        'bani': lambda: out[a] & b,
        'borr': lambda: out[a] | out[b],
        'bori': lambda: out[a] | b,
        'setr': lambda: out[a],
        'seti': lambda: a,
        'gtir': lambda: int(a > out[b]),
        'gtri': lambda: int(out[a] > b),
        'gtrr': lambda: int(out[a] > out[b]),
        'eqir': lambda: int(a == out[b]),
        'eqri': lambda: int(out[a] == b),
        'eqrr': lambda: int(out[a] == out[b]),
    }
    if op in dispatch:
        out[c] = dispatch[op]()
    return out
def _parse(lines):
    """Split the puzzle input into (observations, program).

    Observations come in 4-line groups (Before / instruction / After / blank);
    everything after the last group is the raw test program.
    """
    observations, program, i = [], [], 0
    # parse observations
    while i < len(lines):
        before = parse("Before: [{:d}, {:d}, {:d}, {:d}]", lines[i].strip())
        instruction = parse("{:d} {:d} {:d} {:d}", lines[i + 1].strip())
        after = parse("After: [{:d}, {:d}, {:d}, {:d}]", lines[i + 2].strip())
        i += 4
        # i already advanced past the failed group, hence the i-2 below to
        # land on the first program line.
        if not (before and after and instruction):
            break
        observations.append(Observation([*instruction], [*before], [*after]))
    # parse program
    for line in lines[i - 2:]:
        program.append(list(map(int, line.strip().split(' '))))
    return observations, program
# Entry point: solve both parts against the puzzle input file.
if __name__ == "__main__":
    print(part1(_parse(open(r"../assets/day16.txt").readlines())[0]))
    print(part2(*_parse(open(r"../assets/day16.txt").readlines())))
| <filename>src/day16.py<gh_stars>1-10
# Advent of Code 2018, Day 16
# (c) blu3r4y
from collections import namedtuple
from parse import parse
# The 16 opcode mnemonics of the device's instruction set.
OPERATIONS = ['addr', 'addi', 'mulr', 'muli', 'banr', 'bani', 'borr', 'bori',
              'setr', 'seti', 'gtir', 'gtri', 'gtrr', 'eqir', 'eqri', 'eqrr']
# One puzzle sample: an instruction plus register state before and after it ran.
Observation = namedtuple("Observation", ["instruction", "before", "after"])
def part1(observations):
    """Count samples whose behavior is consistent with >= 3 opcodes."""
    three_or_more = 0
    for obs in observations:
        # Number of operations that reproduce the observed after-state.
        matches = sum(
            1
            for op in OPERATIONS
            if execute(obs.instruction, obs.before, op) == obs.after
        )
        if matches >= 3:
            three_or_more += 1
    return three_or_more
def part2(observations, program):
    """Deduce the opcode -> operation mapping by constraint propagation,
    then run *program* and return the value left in register 0."""
    # store possible candidates for every opcode
    operations = {i: set(OPERATIONS) for i in range(len(OPERATIONS))}
    for obsv in observations:
        matching_operations = set()
        opcode = obsv.instruction[0]
        # execute all possible candidates
        for op in operations[opcode]:
            if obsv.after == execute(obsv.instruction, obsv.before, op):
                matching_operations.add(op)
        # keep only the matching operations
        operations[opcode] = matching_operations
        # if we uniquely identified an operation ...
        if len(matching_operations) == 1:
            unique_op = next(iter(matching_operations))
            # ... remove it from the other mappings
            for key in set(operations.keys()) - {opcode}:
                operations[key].discard(unique_op)
    # map set values to scalar
    # NOTE(review): assumes propagation fully resolves every opcode to exactly
    # one candidate; otherwise .pop() would silently drop alternatives.
    operations = {i: ops.pop() for i, ops in operations.items()}
    # interpret the program
    reg = [0, 0, 0, 0]
    for instruction in program:
        reg = execute(instruction, reg, operations[instruction[0]])
    return reg[0]
def execute(instruction, reg, op):
    """Apply one named operation to a copy of the registers.

    *instruction* is (opcode, a, b, c); the opcode number itself is ignored
    here because *op* names the operation.  Returns the new register list;
    the caller's registers are never mutated.  An unknown *op* returns an
    unchanged copy.
    """
    _, a, b, c = instruction
    out = list(reg)  # operate on a copy
    dispatch = {
        'addr': lambda: out[a] + out[b],
        'addi': lambda: out[a] + b,
        'mulr': lambda: out[a] * out[b],
        'muli': lambda: out[a] * b,
        'banr': lambda: out[a] & out[b],
        'bani': lambda: out[a] & b,
        'borr': lambda: out[a] | out[b],
        'bori': lambda: out[a] | b,
        'setr': lambda: out[a],
        'seti': lambda: a,
        'gtir': lambda: int(a > out[b]),
        'gtri': lambda: int(out[a] > b),
        'gtrr': lambda: int(out[a] > out[b]),
        'eqir': lambda: int(a == out[b]),
        'eqri': lambda: int(out[a] == b),
        'eqrr': lambda: int(out[a] == out[b]),
    }
    if op in dispatch:
        out[c] = dispatch[op]()
    return out
def _parse(lines):
    """Split the puzzle input into (observations, program).

    Observations come in 4-line groups (Before / instruction / After / blank);
    everything after the last group is the raw test program.
    """
    observations, program, i = [], [], 0
    # parse observations
    while i < len(lines):
        before = parse("Before: [{:d}, {:d}, {:d}, {:d}]", lines[i].strip())
        instruction = parse("{:d} {:d} {:d} {:d}", lines[i + 1].strip())
        after = parse("After: [{:d}, {:d}, {:d}, {:d}]", lines[i + 2].strip())
        i += 4
        # i already advanced past the failed group, hence the i-2 below to
        # land on the first program line.
        if not (before and after and instruction):
            break
        observations.append(Observation([*instruction], [*before], [*after]))
    # parse program
    for line in lines[i - 2:]:
        program.append(list(map(int, line.strip().split(' '))))
    return observations, program
# Entry point: solve both parts against the puzzle input file.
if __name__ == "__main__":
    print(part1(_parse(open(r"../assets/day16.txt").readlines())[0]))
    print(part2(*_parse(open(r"../assets/day16.txt").readlines())))
| en | 0.718987 | # Advent of Code 2018, Day 16 # (c) blu3r4y # execute all possible candidates # count observations with three or more possible operations # store possible candidates for every opcode # execute all possible candidates # keep only the matching operations # if we uniquely identified an operation ... # ... remove it from the other mappings # map set values to scalar # interpret the program # copy register # parse observations # parse program | 3.306384 | 3 |
dcms/comment/models.py | yifei-fu/dcms | 1 | 6619078 | from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import ugettext_lazy as _
from config.settings import TEXT_PREVIEW_LEN
from content.models import ContentMetadata
class Comment(ContentMetadata):
    """A user comment attached to any content object via a generic FK.

    Comments may form reply threads through ``replied_to``; a reply must
    target a comment on the same content object.
    """

    content = models.TextField()
    replied_to = models.ForeignKey("self", verbose_name=_("Parent comment being replied to"), on_delete=models.CASCADE,
                                   null=True, blank=True, related_name='replies')
    # Generic relation to the object being commented on.
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id', )

    @property
    def content_preview(self):
        """First TEXT_PREVIEW_LEN characters of the comment body."""
        return self.content[:TEXT_PREVIEW_LEN]

    def __str__(self):
        return 'Comment "{}" by {}'.format(self.content_preview, self.author)

    @staticmethod
    def _field(attrs, name):
        """Read *name* from either serializer data (mapping) or a model
        instance (attribute); returns None when absent."""
        if hasattr(attrs, 'get'):
            return attrs.get(name)
        return getattr(attrs, name, None)

    # Validates that replied_to is either None or a comment to the same object.
    # attrs is either a Comment instance or data in CommentSerializer.
    # BUG FIX: the old `getattr(...) or attrs.get(...)` raised AttributeError
    # for model instances with replied_to=None (instances have no .get).
    @classmethod
    def validate_replied_to(cls, attrs):
        replied_to = cls._field(attrs, 'replied_to')
        if replied_to:
            content_type = cls._field(attrs, 'content_type')
            object_id = cls._field(attrs, 'object_id')
            if (replied_to.content_type != content_type or replied_to.object_id != object_id):
                raise ValidationError("replied_to is either None or a comment to the same object")

    def clean(self):
        self.validate_replied_to(self)
| from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import ugettext_lazy as _
from config.settings import TEXT_PREVIEW_LEN
from content.models import ContentMetadata
class Comment(ContentMetadata):
    """A user comment attached to any content object via a generic FK.

    Comments may form reply threads through ``replied_to``; a reply must
    target a comment on the same content object.
    """

    content = models.TextField()
    replied_to = models.ForeignKey("self", verbose_name=_("Parent comment being replied to"), on_delete=models.CASCADE,
                                   null=True, blank=True, related_name='replies')
    # Generic relation to the object being commented on.
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id', )

    @property
    def content_preview(self):
        """First TEXT_PREVIEW_LEN characters of the comment body."""
        return self.content[:TEXT_PREVIEW_LEN]

    def __str__(self):
        return 'Comment "{}" by {}'.format(self.content_preview, self.author)

    @staticmethod
    def _field(attrs, name):
        """Read *name* from either serializer data (mapping) or a model
        instance (attribute); returns None when absent."""
        if hasattr(attrs, 'get'):
            return attrs.get(name)
        return getattr(attrs, name, None)

    # Validates that replied_to is either None or a comment to the same object.
    # attrs is either a Comment instance or data in CommentSerializer.
    # BUG FIX: the old `getattr(...) or attrs.get(...)` raised AttributeError
    # for model instances with replied_to=None (instances have no .get).
    @classmethod
    def validate_replied_to(cls, attrs):
        replied_to = cls._field(attrs, 'replied_to')
        if replied_to:
            content_type = cls._field(attrs, 'content_type')
            object_id = cls._field(attrs, 'object_id')
            if (replied_to.content_type != content_type or replied_to.object_id != object_id):
                raise ValidationError("replied_to is either None or a comment to the same object")

    def clean(self):
        self.validate_replied_to(self)
| en | 0.828696 | # Validates that replied_to is either None or a comment to the same object. # attrs is either a Comment instance or data in CommentSerializer | 2.144009 | 2 |
Commons/vocab.py | quilan78/MSC_project | 0 | 6619079 | <reponame>quilan78/MSC_project<gh_stars>0
import tensorflow as tf
import numpy as np
import json
from read_data import *
# Sentinel token for out-of-vocabulary words.
UNKNOWN = "<UNKNOWN>"
class Vocab:
    """Word <-> id vocabulary with pointer-generator style OOV handling.

    Ids in [0, vocab_size) are in-vocabulary (the first four are special
    tokens); ids >= vocab_size are temporary, per-article OOV ids.
    """

    def __init__(self, path="../../Data/finished_files/"):
        # Directory containing the vocabulary file.
        self.path = path

    def LoadVocab(self, filename="vocab", max_size=-1):
        """Load the vocab file; ids 0-3 are reserved for the special tokens.

        Returns the final vocabulary size.  max_size=-1 reads the whole
        file; note the cap counts the four special tokens as well.
        """
        vocab_word_to_id = {}
        vocab_id_to_words = {}
        count = 0
        # id 0: out-of-vocabulary marker
        unknown_word = UNKNOWN
        vocab_id_to_words[count] = unknown_word
        vocab_word_to_id[unknown_word] = count
        count += 1
        # id 1: padding token (FILL comes from read_data)
        fill = FILL
        vocab_id_to_words[count] = fill
        vocab_word_to_id[fill] = count
        count += 1
        # id 2: decoder start token
        start_decode = START_DECODE
        vocab_id_to_words[count] = start_decode
        vocab_word_to_id[start_decode] = count
        count += 1
        # id 3: decoder stop token
        stop_decode = STOP_DECODE
        vocab_id_to_words[count] = stop_decode
        vocab_word_to_id[stop_decode] = count
        count += 1
        self.start_decode_token = start_decode
        self.stop_decode_token = stop_decode
        self.start_decode_id = vocab_word_to_id[start_decode]
        self.stop_decode_id = vocab_word_to_id[stop_decode]
        # Vocab file format: one entry per line, word in the first column.
        with open(self.path+filename, 'r', encoding="utf8") as file:
            for line in file:
                word = line.split()[0]
                vocab_word_to_id[word] = count
                vocab_id_to_words[count] = word
                count += 1
                if count == max_size:
                    break
        self.vocab_size = count
        self.vocab_word_to_id = vocab_word_to_id
        self.vocab_id_to_words = vocab_id_to_words
        return count

    def TranslateTextBatchesWithOOV(self, batches):
        """Translate all article batches to int32 id arrays.

        Returns (ids, per-batch OOV word lists, per-batch max OOV count).
        """
        vocab_word_to_id = self.vocab_word_to_id
        oov_words_batches = []
        translated = []
        max_oov_batches = []
        for i in range(len(batches)):
            articles, oov_words, max_oov = self.TranslateBatchArticleWithOOV(batches[i])
            translated.append(articles)
            oov_words_batches.append(oov_words)
            max_oov_batches.append(max_oov)
        return np.array(translated, dtype=np.int32), oov_words_batches, max_oov_batches

    def TranslateSummaryBatchesWithOOV(self, batches, oov_words_batches):
        """Translate summary batches using each batch's article OOV lists."""
        vocab_word_to_id = self.vocab_word_to_id
        translated = []
        for i in range(len(batches)):
            summaries = self.TranslateBatchSummaryWithOOV(batches[i], oov_words_batches[i])
            translated.append(summaries)
        return np.array(translated, dtype=np.int32)

    def TranslateBatchArticleWithOOV(self, articles):
        """Translate one batch of articles; each OOV word gets a temporary id
        vocab_size + (position in that article's OOV list)."""
        oov_words = []
        vocab_word_to_id = self.vocab_word_to_id
        translated = np.zeros(articles.shape)
        for i in range(len(articles)):
            oov_words.append([])
            for j in range(len(articles[0])):
                if articles[i][j] in vocab_word_to_id:
                    translated[i,j] = vocab_word_to_id[articles[i][j]]
                else:
                    # First occurrence registers the word; repeats reuse its slot.
                    if articles[i][j] not in oov_words[i]:
                        oov_words[i].append(articles[i][j])
                    id_ = oov_words[i].index(articles[i][j])
                    translated[i,j] = self.vocab_size + id_
        return translated, oov_words, np.max([len(x) for x in oov_words])

    def TranslateBatchSummaryWithOOV(self, summary, oov_words):
        """Translate one batch of summaries; words present in the paired
        article's OOV list reuse its extended id, others map to UNKNOWN."""
        vocab_word_to_id = self.vocab_word_to_id
        translated = np.zeros(summary.shape)
        for i in range(len(summary)):
            for j in range(len(summary[0])):
                if summary[i][j] in vocab_word_to_id:
                    translated[i,j] = vocab_word_to_id[summary[i][j]]
                else:
                    if summary[i][j] in oov_words[i]:
                        id_ = oov_words[i].index(summary[i][j]) + self.vocab_size
                    else:
                        id_ = vocab_word_to_id[UNKNOWN]
                    translated[i,j] = id_
        return translated

    def TranslateBatches(self, batches):
        """Translate batches word-by-word; any OOV word maps to UNKNOWN."""
        vocab_word_to_id = self.vocab_word_to_id
        translated = np.zeros(batches.shape)
        for i in range(len(batches)):
            for j in range(len(batches[0])):
                for k in range(len(batches[0][0])):
                    if batches[i][j][k] in vocab_word_to_id:
                        translated[i][j][k] = vocab_word_to_id[batches[i][j][k]]
                    else:
                        translated[i][j][k] = vocab_word_to_id[UNKNOWN]
        return translated

    def TransalteAnswer(self, sentence):
        """Map a sequence of ids back to words.  (The typo in the name is part
        of the public interface; callers rely on it.)"""
        vocab_id_to_words = self.vocab_id_to_words
        translated = []
        for i in range(len(sentence)):
            if sentence[i] in vocab_id_to_words:
                translated.append(vocab_id_to_words[sentence[i]])
            else:
                # NOTE(review): appends a one-element *list* here, unlike the
                # plain tokens above — looks unintended; confirm downstream use.
                translated.append([UNKNOWN])
        return np.array(translated)

    def TransalteSentence(self, sentence):
        """Map a sequence of words to ids; OOV words map to UNKNOWN."""
        vocab_word_to_id = self.vocab_word_to_id
        translated = []
        for i in range(len(sentence)):
            if sentence[i] in vocab_word_to_id:
                translated.append(vocab_word_to_id[sentence[i]])
            else:
                translated.append(vocab_word_to_id[UNKNOWN])
        return np.array(translated)
if __name__ == "__main__":
vocab = Vocab()
vocab.LoadVocab()
with open("vocab.json", 'w') as outfile:
json.dump(vocab.vocab_word_to_id, outfile) | import tensorflow as tf
import numpy as np
import json
from read_data import *
# Sentinel token for out-of-vocabulary words.
UNKNOWN = "<UNKNOWN>"
class Vocab:
    """Word <-> id vocabulary with pointer-generator style OOV handling.

    Ids in [0, vocab_size) are in-vocabulary (the first four are special
    tokens); ids >= vocab_size are temporary, per-article OOV ids.
    """

    def __init__(self, path="../../Data/finished_files/"):
        # Directory containing the vocabulary file.
        self.path = path

    def LoadVocab(self, filename="vocab", max_size=-1):
        """Load the vocab file; ids 0-3 are reserved for the special tokens.

        Returns the final vocabulary size.  max_size=-1 reads the whole
        file; note the cap counts the four special tokens as well.
        """
        vocab_word_to_id = {}
        vocab_id_to_words = {}
        count = 0
        # id 0: out-of-vocabulary marker
        unknown_word = UNKNOWN
        vocab_id_to_words[count] = unknown_word
        vocab_word_to_id[unknown_word] = count
        count += 1
        # id 1: padding token (FILL comes from read_data)
        fill = FILL
        vocab_id_to_words[count] = fill
        vocab_word_to_id[fill] = count
        count += 1
        # id 2: decoder start token
        start_decode = START_DECODE
        vocab_id_to_words[count] = start_decode
        vocab_word_to_id[start_decode] = count
        count += 1
        # id 3: decoder stop token
        stop_decode = STOP_DECODE
        vocab_id_to_words[count] = stop_decode
        vocab_word_to_id[stop_decode] = count
        count += 1
        self.start_decode_token = start_decode
        self.stop_decode_token = stop_decode
        self.start_decode_id = vocab_word_to_id[start_decode]
        self.stop_decode_id = vocab_word_to_id[stop_decode]
        # Vocab file format: one entry per line, word in the first column.
        with open(self.path+filename, 'r', encoding="utf8") as file:
            for line in file:
                word = line.split()[0]
                vocab_word_to_id[word] = count
                vocab_id_to_words[count] = word
                count += 1
                if count == max_size:
                    break
        self.vocab_size = count
        self.vocab_word_to_id = vocab_word_to_id
        self.vocab_id_to_words = vocab_id_to_words
        return count

    def TranslateTextBatchesWithOOV(self, batches):
        """Translate all article batches to int32 id arrays.

        Returns (ids, per-batch OOV word lists, per-batch max OOV count).
        """
        vocab_word_to_id = self.vocab_word_to_id
        oov_words_batches = []
        translated = []
        max_oov_batches = []
        for i in range(len(batches)):
            articles, oov_words, max_oov = self.TranslateBatchArticleWithOOV(batches[i])
            translated.append(articles)
            oov_words_batches.append(oov_words)
            max_oov_batches.append(max_oov)
        return np.array(translated, dtype=np.int32), oov_words_batches, max_oov_batches

    def TranslateSummaryBatchesWithOOV(self, batches, oov_words_batches):
        """Translate summary batches using each batch's article OOV lists."""
        vocab_word_to_id = self.vocab_word_to_id
        translated = []
        for i in range(len(batches)):
            summaries = self.TranslateBatchSummaryWithOOV(batches[i], oov_words_batches[i])
            translated.append(summaries)
        return np.array(translated, dtype=np.int32)

    def TranslateBatchArticleWithOOV(self, articles):
        """Translate one batch of articles; each OOV word gets a temporary id
        vocab_size + (position in that article's OOV list)."""
        oov_words = []
        vocab_word_to_id = self.vocab_word_to_id
        translated = np.zeros(articles.shape)
        for i in range(len(articles)):
            oov_words.append([])
            for j in range(len(articles[0])):
                if articles[i][j] in vocab_word_to_id:
                    translated[i,j] = vocab_word_to_id[articles[i][j]]
                else:
                    # First occurrence registers the word; repeats reuse its slot.
                    if articles[i][j] not in oov_words[i]:
                        oov_words[i].append(articles[i][j])
                    id_ = oov_words[i].index(articles[i][j])
                    translated[i,j] = self.vocab_size + id_
        return translated, oov_words, np.max([len(x) for x in oov_words])

    def TranslateBatchSummaryWithOOV(self, summary, oov_words):
        """Translate one batch of summaries; words present in the paired
        article's OOV list reuse its extended id, others map to UNKNOWN."""
        vocab_word_to_id = self.vocab_word_to_id
        translated = np.zeros(summary.shape)
        for i in range(len(summary)):
            for j in range(len(summary[0])):
                if summary[i][j] in vocab_word_to_id:
                    translated[i,j] = vocab_word_to_id[summary[i][j]]
                else:
                    if summary[i][j] in oov_words[i]:
                        id_ = oov_words[i].index(summary[i][j]) + self.vocab_size
                    else:
                        id_ = vocab_word_to_id[UNKNOWN]
                    translated[i,j] = id_
        return translated

    def TranslateBatches(self, batches):
        """Translate batches word-by-word; any OOV word maps to UNKNOWN."""
        vocab_word_to_id = self.vocab_word_to_id
        translated = np.zeros(batches.shape)
        for i in range(len(batches)):
            for j in range(len(batches[0])):
                for k in range(len(batches[0][0])):
                    if batches[i][j][k] in vocab_word_to_id:
                        translated[i][j][k] = vocab_word_to_id[batches[i][j][k]]
                    else:
                        translated[i][j][k] = vocab_word_to_id[UNKNOWN]
        return translated

    def TransalteAnswer(self, sentence):
        """Map a sequence of ids back to words.  (The typo in the name is part
        of the public interface; callers rely on it.)"""
        vocab_id_to_words = self.vocab_id_to_words
        translated = []
        for i in range(len(sentence)):
            if sentence[i] in vocab_id_to_words:
                translated.append(vocab_id_to_words[sentence[i]])
            else:
                # NOTE(review): appends a one-element *list* here, unlike the
                # plain tokens above — looks unintended; confirm downstream use.
                translated.append([UNKNOWN])
        return np.array(translated)

    def TransalteSentence(self, sentence):
        """Map a sequence of words to ids; OOV words map to UNKNOWN."""
        vocab_word_to_id = self.vocab_word_to_id
        translated = []
        for i in range(len(sentence)):
            if sentence[i] in vocab_word_to_id:
                translated.append(vocab_word_to_id[sentence[i]])
            else:
                translated.append(vocab_word_to_id[UNKNOWN])
        return np.array(translated)
if __name__ == "__main__":
vocab = Vocab()
vocab.LoadVocab()
with open("vocab.json", 'w') as outfile:
json.dump(vocab.vocab_word_to_id, outfile) | none | 1 | 2.800214 | 3 | |
src/hmcscanner/hmcClient.py | vagfed/hmcscanner-py | 0 | 6619080 | from email.errors import NonPrintableDefect
from paramiko import SSHClient, AutoAddPolicy, ProxyCommand
from paramiko.ssh_exception import BadHostKeyException, AuthenticationException, SSHException
import logging
import socket
import os
from datetime import datetime
import uuid
logger = logging.getLogger("Main")
BUFSIZE = 10*1024  # bytes pulled per SSH channel recv() call
# Random marker-file name used to probe that the output dir is writable.
WRITETEST = str(uuid.uuid4())
class HmcClient:
    """Client to interact with HMC over SSH, optionally via a jump host.

    Output of each command is streamed into a file under *outDir*.  Redacted
    ``<PASSWORD>`` placeholders in the original were restored to working
    Python (``None`` defaults, plain assignments, boolean-only logging).
    """

    def __init__(self, host, user, outDir, password=None, ssh_key=None, connect=True,
                 j_host=None, j_user=None, j_password=None, j_ssh_key=None):
        """Store credentials, optionally connect, and prepare *outDir*.

        Key-based auth is tried before password auth on both hops.  On any
        failure self.client stays None and isConnected() returns False.
        Passwords are never logged — only whether one was supplied.
        """
        logger.debug(f'HMC: {user}@{host}, outDir: {outDir}, password: {password is not None}, ssh_key: {ssh_key is not None}, connect: {connect}')
        self.host = host
        self.user = user
        self.password = password
        self.ssh_key = ssh_key
        self.outDir = outDir
        self.j_host = j_host
        self.j_user = j_user
        self.j_password = j_password
        self.j_ssh_key = j_ssh_key
        self.validDir = False
        self.client = None
        if connect:
            self.client = self._connect()
            try:
                os.mkdir(outDir)
            except FileExistsError:
                if not os.path.isdir(outDir):
                    logger.error(f'File {outDir} exists and it is not a directory')
                    return
            except OSError:
                logger.error(f'Can not create directory {outDir}')
                return
            # Probe that outDir is actually writable before running commands;
            # the marker file is removed right away on success.
            with open(os.path.join(outDir, WRITETEST), 'wt') as file:
                try:
                    file.write(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
                    self.validDir = True
                except Exception:
                    logger.error(f'Can not write into {outDir}')
                    return
            os.remove(os.path.join(outDir, WRITETEST))
        else:
            # Caller asked for a detached client (e.g. for testing).
            pass

    def _connect_old(self):
        """Legacy direct-connection path (no jump host); kept for reference.

        BUG FIX: calls now match _try_to_connect's current
        (host, user, password, ssh_key) signature — the old two-argument
        calls raised TypeError whenever this method ran.
        """
        if self.ssh_key is not None:
            logger.debug(f'{self.user}@{self.host}: try connection with ssh key only')
            client = self._try_to_connect(self.host, self.user, None, self.ssh_key)
            if client is not None:
                return client
        logger.debug(f'{self.user}@{self.host}: try connection with password only')
        client = self._try_to_connect(self.host, self.user, self.password, None)
        if client is None:
            logger.error(f'{self.user}@{self.host}: connection failed')
        return client

    def _connect(self):
        """Connect to the target HMC, tunnelling through the jump host if one
        is configured.  Returns a connected SSHClient or None."""
        if self.j_host is not None:
            logger.debug(f'Jump host configured: {self.j_host}')
            if self.j_ssh_key is not None:
                logger.debug(f'Try key {self.j_ssh_key} for jump host {self.j_user}@{self.j_host}')
                jumphost = self._try_to_connect(self.j_host, self.j_user, None, self.j_ssh_key)
                if jumphost is None:
                    logger.debug(f'Failed key login on {self.j_user}@{self.j_host}')
                    if self.j_password is not None:
                        logger.debug(f'Try password for jump host {self.j_user}@{self.j_host}')
                        jumphost = self._try_to_connect(self.j_host, self.j_user, self.j_password, None)
                        if jumphost is None:
                            logger.debug(f'Failed password login on {self.j_user}@{self.j_host}. Aborting...')
                            return None
                    else:
                        logger.debug(f'Failed login on {self.j_user}@{self.j_host}. Aborting...')
                        return None
            elif self.j_password is not None:
                logger.debug(f'Try password for jump host {self.j_user}@{self.j_host}')
                jumphost = self._try_to_connect(self.j_host, self.j_user, self.j_password, None)
                if jumphost is None:
                    logger.debug(f'Failed password login on {self.j_user}@{self.j_host}. Aborting...')
                    return None
            else:
                logger.error(f'Jump host defined but no key and no password provided!')
                return None
            # Open a direct-tcpip channel through the jump host to the target.
            transport = jumphost.get_transport()
            destination = (self.host, 22)
            source = ("0.0.0.0", 0)
            try:
                channel = transport.open_channel("direct-tcpip", destination, source)
            except Exception as e:
                logger.error(f'Could not open channel to target. Exception: {e}')
                return None
            logger.debug(f'Starting connection from {self.j_host} to {self.host}')
        else:
            channel = None
        if self.ssh_key is not None:
            logger.debug(
                f'Try key {self.ssh_key} for target host {self.user}@{self.host}, channel={channel}')
            client = self._try_to_connect(self.host, self.user, None, self.ssh_key, sock=channel)
            if client is None:
                logger.debug(f'Failed key login on {self.user}@{self.host} sock={channel}')
                # BUG FIX: fall back on the *target* password (was j_password),
                # and log the target host (was j_user@j_host).
                if self.password is not None:
                    logger.debug(f'Try password target host {self.user}@{self.host} sock={channel}')
                    client = self._try_to_connect(self.host, self.user, self.password, None, sock=channel)
                    if client is None:
                        logger.debug(f'Failed login on {self.user}@{self.host} sock={channel}. Aborting...')
                        return None
                else:
                    logger.debug(f'Failed logging on {self.user}@{self.host} sock={channel}. Aborting...')
                    return None
        elif self.password is not None:
            logger.debug(
                f'Try password for target host {self.user}@{self.host}, channel={channel}')
            client = self._try_to_connect(self.host, self.user, self.password, None, sock=channel)
            if client is None:
                logger.debug(f'Failed password login on {self.user}@{self.host} sock={channel}')
                logger.debug(f'Failed logging on {self.user}@{self.host} sock={channel}. Aborting...')
                return None
        else:
            logger.error(f'Target host had no key and no password!')
            return None
        return client

    def _try_to_connect(self, host, user, password, ssh_key, sock=None):
        """Single SSH connection attempt; returns an SSHClient or None.

        Only the *presence* of a password is logged, never its value.
        """
        logger.debug(f'Start connection: {user}@{host}, password: {password is not None}, ssh_key: {ssh_key}, sock={sock}')
        try:
            client = SSHClient()
            client.load_system_host_keys()
            # Accept unknown host keys automatically (scanner-tool trade-off).
            client.set_missing_host_key_policy(AutoAddPolicy())
            client.connect(
                host,
                username=user,
                password=password,
                key_filename=ssh_key,
                look_for_keys=True,
                sock=sock
            )
            logger.debug(f'Connected to {user}@{host}')
        except BadHostKeyException as exc:
            logger.error(f'{user}@{host}: server’s host key could not be verified: {exc}')
            return None
        except AuthenticationException as exc:
            logger.error(f'{user}@{host}: authentication error: {exc}')
            return None
        except SSHException as exc:
            logger.error(f'{user}@{host}: SSH error: {exc}')
            return None
        except socket.error as exc:
            logger.error(f'{user}@{host}: socket error: {exc}')
            return None
        except Exception as exc:
            logger.error(f'{user}@{host}: unexpected error: {exc}')
            return None
        logger.debug(f'{user}@{host}: connection is successful')
        return client

    def isConnected(self):
        """True when an SSH session is currently held."""
        return self.client != None

    def close(self):
        """Close the SSH session (idempotent)."""
        if self.client is not None:
            self.client.close()
            logger.debug('Connection closed')
        else:
            logger.debug('Connection was not open')
        self.client = None

    def runCommand(self, command, fileName):
        """Run *command* on the HMC, streaming stdout+stderr to outDir/fileName."""
        if self.client is None:
            logger.error('Client was not connected. Command not executed')
            return
        if not self.validDir:
            logger.error('No valid output dir. Command not executed')
            # BUG FIX: previously fell through and executed the command anyway.
            return
        try:
            logger.debug(f'{command} -> {fileName}')
            transport = self.client.get_transport()
            session = transport.open_session()
            session.set_combine_stderr(True)
            with open(os.path.join(self.outDir, fileName), 'wb') as file:
                # LANG=C keeps HMC output locale-independent for later parsing.
                session.exec_command('LANG=C ' + command)
                data = session.recv(BUFSIZE)
                while len(data) > 0:
                    file.write(data)
                    data = session.recv(BUFSIZE)
            rc = session.recv_exit_status()
            logger.debug(f'Return code: {rc}')
        except SSHException as exc:
            logger.error(f'SSH error: {exc}')
            return
        except (OSError, IOError) as e:
            logger.error(f'I/O error: {e}')
| from email.errors import NonPrintableDefect
from paramiko import SSHClient, AutoAddPolicy, ProxyCommand
from paramiko.ssh_exception import BadHostKeyException, AuthenticationException, SSHException
import logging
import socket
import os
from datetime import datetime
import uuid
logger = logging.getLogger("Main")
BUFSIZE = 10*1024  # bytes pulled per SSH channel recv() call
# Random marker-file name used to probe that the output dir is writable.
WRITETEST = str(uuid.uuid4())
class HmcClient:
    """Client to interact with an HMC over SSH.

    Connects either directly or through an optional jump host, trying
    key-based authentication first and falling back to a password.
    Output of remote commands is stored as files inside ``outDir``.
    """

    def __init__(self, host, user, outDir, password=None, ssh_key=None, connect=True,
                 j_host=None, j_user=None, j_password=None, j_ssh_key=None):
        """Create the client and optionally connect and validate ``outDir``.

        :param host: target HMC hostname or IP address
        :param user: user name on the target host
        :param outDir: local directory that receives command output files
        :param password: password for the target host, or None
        :param ssh_key: path to a private key file for the target host, or None
        :param connect: when True, connect immediately and check that
            ``outDir`` exists (creating it if needed) and is writable
        :param j_host: optional jump host used to tunnel the connection
        :param j_user: user name on the jump host
        :param j_password: password for the jump host, or None
        :param j_ssh_key: path to a private key file for the jump host, or None
        """
        # Never log credentials themselves, only whether they were provided.
        logger.debug(f'HMC: {user}@{host}, outDir: {outDir}, '
                     f'password: {password is not None}, ssh_key: {ssh_key is not None}, '
                     f'connect: {connect}')
        self.host = host
        self.user = user
        self.password = password
        self.ssh_key = ssh_key
        self.outDir = outDir
        self.j_host = j_host
        self.j_user = j_user
        self.j_password = j_password
        self.j_ssh_key = j_ssh_key
        self.validDir = False  # becomes True once outDir is proven writable
        self.client = None
        if not connect:
            return
        self.client = self._connect()
        try:
            os.mkdir(outDir)
        except FileExistsError:
            if not os.path.isdir(outDir):
                logger.error(f'File {outDir} exists and it is not a directory')
                return
        except OSError:
            logger.error(f'Can not create directory {outDir}')
            return
        # Probe that outDir is writable by creating and removing a unique file.
        probe = os.path.join(outDir, WRITETEST)
        try:
            with open(probe, 'wt') as file:
                file.write(datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
            self.validDir = True
        except Exception:
            logger.error(f'Can not write into {outDir}')
        finally:
            # Clean up the probe file even when the write failed half-way
            # (the original skipped os.remove on failure, leaking the file).
            try:
                os.remove(probe)
            except OSError:
                pass

    def _connect_old(self):
        """Legacy direct-connection helper (no jump-host support).

        :return: a connected SSHClient, or None on failure.
        """
        if self.ssh_key is not None:
            logger.debug(f'{self.user}@{self.host}: try connection with ssh key only')
            # BUG FIX: was called with the obsolete 2-argument signature,
            # which would raise TypeError against the current helper.
            client = self._try_to_connect(self.host, self.user, None, self.ssh_key)
            if client is not None:
                return client
        logger.debug(f'{self.user}@{self.host}: try connection with password only')
        client = self._try_to_connect(self.host, self.user, self.password, None)
        if client is None:
            logger.error(f'{self.user}@{self.host}: connection failed')
        return client

    def _connect_jumphost(self):
        """Authenticate to the jump host, key first, then password fallback.

        :return: a connected SSHClient to the jump host, or None.
        """
        if self.j_ssh_key is not None:
            logger.debug(f'Try key {self.j_ssh_key} for jump host {self.j_user}@{self.j_host}')
            jumphost = self._try_to_connect(self.j_host, self.j_user, None, self.j_ssh_key)
            if jumphost is not None:
                return jumphost
            logger.debug(f'Failed key login on {self.j_user}@{self.j_host}')
        if self.j_password is not None:
            logger.debug(f'Try password for jump host {self.j_user}@{self.j_host}')
            jumphost = self._try_to_connect(self.j_host, self.j_user, self.j_password, None)
            if jumphost is not None:
                return jumphost
            logger.debug(f'Failed password login on {self.j_user}@{self.j_host}. Aborting...')
            return None
        if self.j_ssh_key is None:
            logger.error('Jump host defined but no key and no password provided!')
        else:
            logger.debug(f'Failed login on {self.j_user}@{self.j_host}. Aborting...')
        return None

    def _connect_target(self, channel):
        """Authenticate to the target host, key first, then password fallback.

        :param channel: tunnel channel through the jump host, or None for direct.
        :return: a connected SSHClient, or None.
        """
        if self.ssh_key is not None:
            logger.debug(
                f'Try key {self.ssh_key} for target host {self.user}@{self.host}, channel={channel}')
            client = self._try_to_connect(self.host, self.user, None, self.ssh_key, sock=channel)
            if client is not None:
                return client
            logger.debug(f'Failed key login on {self.user}@{self.host} sock={channel}')
        # BUG FIX: the original tested self.j_password here (and logged the
        # jump host), so the target-host password fallback never ran when a
        # jump host without a password was configured.
        if self.password is not None:
            logger.debug(
                f'Try password for target host {self.user}@{self.host}, channel={channel}')
            client = self._try_to_connect(self.host, self.user, self.password, None, sock=channel)
            if client is not None:
                return client
            logger.debug(f'Failed password login on {self.user}@{self.host} sock={channel}. Aborting...')
            return None
        if self.ssh_key is None:
            logger.error('Target host had no key and no password!')
        else:
            logger.debug(f'Failed login on {self.user}@{self.host} sock={channel}. Aborting...')
        return None

    def _connect(self):
        """Open the SSH connection, tunneled through the jump host when set.

        :return: a connected paramiko SSHClient, or None on any failure.
        """
        channel = None
        if self.j_host is not None:
            logger.debug(f'Jump host configured: {self.j_host}')
            jumphost = self._connect_jumphost()
            if jumphost is None:
                return None
            transport = jumphost.get_transport()
            try:
                # Tunnel a TCP channel from the jump host to the target's SSH port.
                channel = transport.open_channel(
                    "direct-tcpip", (self.host, 22), ("0.0.0.0", 0))
            except Exception as e:
                logger.error(f'Could not open channel to target. Exception: {e}')
                return None
            logger.debug(f'Starting connection from {self.j_host} to {self.host}')
        return self._connect_target(channel)

    def _try_to_connect(self, host, user, password, ssh_key, sock=None):
        """Attempt one SSH connection; never raises.

        :param host: hostname or IP address to connect to
        :param user: login user name
        :param password: password, or None for key-only authentication
        :param ssh_key: path to a private key file, or None
        :param sock: optional pre-opened channel (jump-host tunnel)
        :return: a connected SSHClient, or None on any error.
        """
        logger.debug(f'Start connection: {user}@{host}, password: {password is not None}, '
                     f'ssh_key: {ssh_key}, sock={sock}')
        try:
            client = SSHClient()
            client.load_system_host_keys()
            client.set_missing_host_key_policy(AutoAddPolicy())
            client.connect(
                host,
                username=user,
                password=password,
                key_filename=ssh_key,
                look_for_keys=True,
                sock=sock
            )
            logger.debug(f'Connected to {user}@{host}')
        except BadHostKeyException as exc:
            logger.error(f'{user}@{host}: server’s host key could not be verified: {exc}')
            return None
        except AuthenticationException as exc:
            logger.error(f'{user}@{host}: authentication error: {exc}')
            return None
        except SSHException as exc:
            logger.error(f'{user}@{host}: SSH error: {exc}')
            return None
        except socket.error as exc:
            logger.error(f'{user}@{host}: socket error: {exc}')
            return None
        except Exception as exc:
            logger.error(f'{user}@{host}: unexpected error: {exc}')
            return None
        logger.debug(f'{user}@{host}: connection is successful')
        return client

    def isConnected(self):
        """Return True when an SSH client is currently open."""
        return self.client is not None

    def close(self):
        """Close the SSH connection if it is open; safe to call twice."""
        if self.client is not None:
            self.client.close()
            logger.debug('Connection closed')
        else:
            logger.debug('Connection was not open')
        self.client = None

    def runCommand(self, command, fileName):
        """Run ``command`` remotely and store its combined output in a file.

        The command is prefixed with ``LANG=C`` so the output language is
        predictable; stdout and stderr are combined into a single stream.

        :param command: shell command to execute on the HMC
        :param fileName: output file name, created inside ``self.outDir``
        """
        if self.client is None:
            logger.error('Client was not connected. Command not executed')
            return
        if not self.validDir:
            logger.error('No valid output dir. Command not executed')
            # BUG FIX: the original fell through and executed the command anyway.
            return
        try:
            logger.debug(f'{command} -> {fileName}')
            transport = self.client.get_transport()
            session = transport.open_session()
            session.set_combine_stderr(True)
            with open(os.path.join(self.outDir, fileName), 'wb') as file:
                session.exec_command('LANG=C ' + command)
                data = session.recv(BUFSIZE)
                while len(data) > 0:
                    file.write(data)
                    data = session.recv(BUFSIZE)
                rc = session.recv_exit_status()
            session.close()
            logger.debug(f'Return code: {rc}')
        except SSHException as exc:
            logger.error(f'SSH error: {exc}')
            return
        except (OSError, IOError) as e:
            logger.error(f'I/O error: {e}')
| en | 0.56687 | Client to interact with HMC # Do nothing! #if rc != 0: # logger.warning(f'RC={rc} for {self.user}@{self.host}:{command}') | 2.358685 | 2 |
aioautomatic/session.py | armills/aioautomatic | 8 | 6619081 | <reponame>armills/aioautomatic<filename>aioautomatic/session.py<gh_stars>1-10
"""Session interface for aioautomatic."""
import asyncio
import logging
from aioautomatic import base
from aioautomatic import const
from aioautomatic import data
from aioautomatic import validation
_LOGGER = logging.getLogger(__name__)
def gen_query_string(params):
"""Generate a query string from the parameter dict."""
return '&'.join('{}={}'.format(k, v) for k, v in params.items())
class Session(base.BaseApiObject):
"""Session object to manage access to a users information."""
def __init__(self, client, **kwargs):
"""Create a session object."""
super().__init__(client)
self._client = client
self._renew_handle = None
self._load_token_data(**kwargs)
def _load_token_data(self, access_token, refresh_token, expires_in, scope,
**kwargs):
"""Store the data from the access token response."""
self._access_token = access_token
self._refresh_token = refresh_token
self._scope = scope
self._request_kwargs.update({
'headers': {
'Authorization': 'Bearer {}'.format(self._access_token),
},
})
# Renew one hour before expiration
renew_time = self.loop.time() + expires_in - 3600
if self._renew_handle is not None:
self._renew_handle.cancel()
self._renew_handle = self.loop.call_at(
renew_time, lambda: self.loop.create_task(self.refresh()))
@asyncio.coroutine
def refresh(self):
"""Use the refresh token to request a new access token."""
_LOGGER.info("Refreshing the session access token.")
auth_payload = {
'client_id': self._client.client_id,
'client_secret': self._client.client_secret,
'grant_type': 'refresh_token',
'refresh_token': self._refresh_token,
}
resp = yield from self._post(const.AUTH_URL, auth_payload)
resp = validation.AUTH_TOKEN(resp)
self._load_token_data(**resp)
@asyncio.coroutine
def get_vehicle(self, vehicle_id):
"""Get a single vehicle associated with this user account.
:param vehicle_id: Vehicle ID to fetch
"""
_LOGGER.info("Fetching vehicle.")
resp = yield from self._get(const.VEHICLE_URL.format(vehicle_id))
return data.Vehicle(resp)
@asyncio.coroutine
def get_vehicles(self, **kwargs):
"""Get all vehicles associated with this user account.
:param created_at__lte: Maximum start time filter
:param created_at__gte: Minimum start time filter
:param updated_at__lte: Maximum end time filter
:param updated_at__gte: Minimum end time filter
:param vin: Vin filter
:param page: Page number of paginated result to return
:param limit: Number of results per page
"""
query = gen_query_string(validation.VEHICLES_REQUEST(kwargs))
_LOGGER.info("Fetching vehicles.")
resp = yield from self._get('?'.join((const.VEHICLES_URL, query)))
return base.ResultList(self, resp, data.Vehicle)
@asyncio.coroutine
def get_trip(self, trip_id):
"""Get a single trip associated with this user account.
:param trip_id: Trip ID to fetch
"""
_LOGGER.info("Fetching trip.")
resp = yield from self._get(const.TRIP_URL.format(trip_id))
return data.Trip(resp)
@asyncio.coroutine
def get_trips(self, **kwargs):
"""Get all vehicles associated with this user account.
:param started_at__lte: Maximum start time filter
:param started_at__gte: Minimum start time filter
:param ended_at__lte: Maximum end time filter
:param ended_at__gte: Minimum end time filter
:param vehicle: Vehicle Filter
:param tags__in: Tags Filter
:param page: Page number of paginated result to return
:param limit: Number of results per page
"""
query = gen_query_string(validation.TRIPS_REQUEST(kwargs))
_LOGGER.info("Fetching trips.")
resp = yield from self._get('?'.join((const.TRIPS_URL, query)))
return base.ResultList(self, resp, data.Trip)
@asyncio.coroutine
def get_device(self, device_id):
"""Get a single device associated with this user account.
:param device_id: Device ID to fetch
"""
_LOGGER.info("Fetching device.")
resp = yield from self._get(const.DEVICE_URL.format(device_id))
return data.Device(resp)
@asyncio.coroutine
def get_devices(self, **kwargs):
"""Get all devices associated with this user account.
:param device__serial_number: Device serial number
:param page: Page number of paginated result to return
:param limit: Number of results per page
"""
query = gen_query_string(validation.DEVICES_REQUEST(kwargs))
_LOGGER.info("Fetching devices.")
resp = yield from self._get('?'.join((const.DEVICES_URL, query)))
return base.ResultList(self, resp, data.Device)
@asyncio.coroutine
def get_user(self, **kwargs):
"""Fetch information for the specified user.
If no user is specified, fetch information for the authorized
user.
:param id: User id to fetch
"""
user_id = validation.USER_REQUEST(kwargs).get("id", "me")
_LOGGER.info("Fetching devices.")
resp = yield from self._get(const.USER_URL.format(user_id))
return data.User(self, resp)
@property
def refresh_token(self):
"""The refresh token used to authorize a new session."""
return self._refresh_token
| """Session interface for aioautomatic."""
import asyncio
import logging
from aioautomatic import base
from aioautomatic import const
from aioautomatic import data
from aioautomatic import validation
_LOGGER = logging.getLogger(__name__)
def gen_query_string(params):
"""Generate a query string from the parameter dict."""
return '&'.join('{}={}'.format(k, v) for k, v in params.items())
class Session(base.BaseApiObject):
"""Session object to manage access to a users information."""
def __init__(self, client, **kwargs):
"""Create a session object."""
super().__init__(client)
self._client = client
self._renew_handle = None
self._load_token_data(**kwargs)
def _load_token_data(self, access_token, refresh_token, expires_in, scope,
**kwargs):
"""Store the data from the access token response."""
self._access_token = access_token
self._refresh_token = refresh_token
self._scope = scope
self._request_kwargs.update({
'headers': {
'Authorization': 'Bearer {}'.format(self._access_token),
},
})
# Renew one hour before expiration
renew_time = self.loop.time() + expires_in - 3600
if self._renew_handle is not None:
self._renew_handle.cancel()
self._renew_handle = self.loop.call_at(
renew_time, lambda: self.loop.create_task(self.refresh()))
@asyncio.coroutine
def refresh(self):
"""Use the refresh token to request a new access token."""
_LOGGER.info("Refreshing the session access token.")
auth_payload = {
'client_id': self._client.client_id,
'client_secret': self._client.client_secret,
'grant_type': 'refresh_token',
'refresh_token': self._refresh_token,
}
resp = yield from self._post(const.AUTH_URL, auth_payload)
resp = validation.AUTH_TOKEN(resp)
self._load_token_data(**resp)
@asyncio.coroutine
def get_vehicle(self, vehicle_id):
"""Get a single vehicle associated with this user account.
:param vehicle_id: Vehicle ID to fetch
"""
_LOGGER.info("Fetching vehicle.")
resp = yield from self._get(const.VEHICLE_URL.format(vehicle_id))
return data.Vehicle(resp)
@asyncio.coroutine
def get_vehicles(self, **kwargs):
"""Get all vehicles associated with this user account.
:param created_at__lte: Maximum start time filter
:param created_at__gte: Minimum start time filter
:param updated_at__lte: Maximum end time filter
:param updated_at__gte: Minimum end time filter
:param vin: Vin filter
:param page: Page number of paginated result to return
:param limit: Number of results per page
"""
query = gen_query_string(validation.VEHICLES_REQUEST(kwargs))
_LOGGER.info("Fetching vehicles.")
resp = yield from self._get('?'.join((const.VEHICLES_URL, query)))
return base.ResultList(self, resp, data.Vehicle)
@asyncio.coroutine
def get_trip(self, trip_id):
"""Get a single trip associated with this user account.
:param trip_id: Trip ID to fetch
"""
_LOGGER.info("Fetching trip.")
resp = yield from self._get(const.TRIP_URL.format(trip_id))
return data.Trip(resp)
@asyncio.coroutine
def get_trips(self, **kwargs):
"""Get all vehicles associated with this user account.
:param started_at__lte: Maximum start time filter
:param started_at__gte: Minimum start time filter
:param ended_at__lte: Maximum end time filter
:param ended_at__gte: Minimum end time filter
:param vehicle: Vehicle Filter
:param tags__in: Tags Filter
:param page: Page number of paginated result to return
:param limit: Number of results per page
"""
query = gen_query_string(validation.TRIPS_REQUEST(kwargs))
_LOGGER.info("Fetching trips.")
resp = yield from self._get('?'.join((const.TRIPS_URL, query)))
return base.ResultList(self, resp, data.Trip)
@asyncio.coroutine
def get_device(self, device_id):
"""Get a single device associated with this user account.
:param device_id: Device ID to fetch
"""
_LOGGER.info("Fetching device.")
resp = yield from self._get(const.DEVICE_URL.format(device_id))
return data.Device(resp)
@asyncio.coroutine
def get_devices(self, **kwargs):
"""Get all devices associated with this user account.
:param device__serial_number: Device serial number
:param page: Page number of paginated result to return
:param limit: Number of results per page
"""
query = gen_query_string(validation.DEVICES_REQUEST(kwargs))
_LOGGER.info("Fetching devices.")
resp = yield from self._get('?'.join((const.DEVICES_URL, query)))
return base.ResultList(self, resp, data.Device)
@asyncio.coroutine
def get_user(self, **kwargs):
"""Fetch information for the specified user.
If no user is specified, fetch information for the authorized
user.
:param id: User id to fetch
"""
user_id = validation.USER_REQUEST(kwargs).get("id", "me")
_LOGGER.info("Fetching devices.")
resp = yield from self._get(const.USER_URL.format(user_id))
return data.User(self, resp)
@property
def refresh_token(self):
"""The refresh token used to authorize a new session."""
return self._refresh_token | en | 0.687502 | Session interface for aioautomatic. Generate a query string from the parameter dict. Session object to manage access to a users information. Create a session object. Store the data from the access token response. # Renew one hour before expiration Use the refresh token to request a new access token. Get a single vehicle associated with this user account. :param vehicle_id: Vehicle ID to fetch Get all vehicles associated with this user account. :param created_at__lte: Maximum start time filter :param created_at__gte: Minimum start time filter :param updated_at__lte: Maximum end time filter :param updated_at__gte: Minimum end time filter :param vin: Vin filter :param page: Page number of paginated result to return :param limit: Number of results per page Get a single trip associated with this user account. :param trip_id: Trip ID to fetch Get all vehicles associated with this user account. :param started_at__lte: Maximum start time filter :param started_at__gte: Minimum start time filter :param ended_at__lte: Maximum end time filter :param ended_at__gte: Minimum end time filter :param vehicle: Vehicle Filter :param tags__in: Tags Filter :param page: Page number of paginated result to return :param limit: Number of results per page Get a single device associated with this user account. :param device_id: Device ID to fetch Get all devices associated with this user account. :param device__serial_number: Device serial number :param page: Page number of paginated result to return :param limit: Number of results per page Fetch information for the specified user. If no user is specified, fetch information for the authorized user. :param id: User id to fetch The refresh token used to authorize a new session. | 2.735333 | 3 |
4.py | kmirzavaziri/neural-network | 0 | 6619082 | <gh_stars>0
from utils import *
visualizer = Visualizer(2, 3)
x_train, y_train, x_test, y_test = helpers.dataset(200, 400)
visualizer.add(x_test, y_test, title='Real Classes')
# Iterate and predict over some different annealing degrees
PARAMETERS_COUNT = 2
HIDDEN_LAYER_COUNT = 3
CLASSES_COUNT = 2
EPSILON = .01
R_LAMBDA = .01
for ANNEALING_DEGREE in [.2, .1, .01, .001, 0]:
print(ANNEALING_DEGREE)
nn = NeuralNetwork(
[PARAMETERS_COUNT, HIDDEN_LAYER_COUNT, CLASSES_COUNT], EPSILON, R_LAMBDA,
annealing_degree=ANNEALING_DEGREE
)
nn.train(x_train, y_train)
y_pred = nn.predict(x_test)
visualizer.add(
x_test, y_pred,
title=f'Annealing degree {ANNEALING_DEGREE} \nLoss {helpers.loss(y_test, nn.outputs_history)}'
)
visualizer.show('4.png')
| from utils import *
visualizer = Visualizer(2, 3)
x_train, y_train, x_test, y_test = helpers.dataset(200, 400)
visualizer.add(x_test, y_test, title='Real Classes')
# Iterate and predict over some different annealing degrees
PARAMETERS_COUNT = 2
HIDDEN_LAYER_COUNT = 3
CLASSES_COUNT = 2
EPSILON = .01
R_LAMBDA = .01
for ANNEALING_DEGREE in [.2, .1, .01, .001, 0]:
print(ANNEALING_DEGREE)
nn = NeuralNetwork(
[PARAMETERS_COUNT, HIDDEN_LAYER_COUNT, CLASSES_COUNT], EPSILON, R_LAMBDA,
annealing_degree=ANNEALING_DEGREE
)
nn.train(x_train, y_train)
y_pred = nn.predict(x_test)
visualizer.add(
x_test, y_pred,
title=f'Annealing degree {ANNEALING_DEGREE} \nLoss {helpers.loss(y_test, nn.outputs_history)}'
)
visualizer.show('4.png') | en | 0.775738 | # Iterate and predict over some different annealing degrees | 3.114371 | 3 |
aoc_wim/aoc2015/q13.py | wimglenn/advent-of-code-wim | 20 | 6619083 | """
--- Day 13: Knights of the Dinner Table ---
https://adventofcode.com/2015/day/13
"""
from collections import defaultdict
from itertools import permutations
from aocd import data
def parsed(data, extra_name=None):
d = defaultdict(int)
names = {extra_name} - {None}
for line in data.splitlines():
words = line.split()
name0 = words[0]
name1 = words[-1].rstrip(".")
n = {"gain": 1, "lose": -1}[words[2]] * int(words[3])
d[(name0, name1)] = n
names |= {name0, name1}
return names, d
def get_best_plan(data, extra_name=None):
names, d = parsed(data, extra_name)
n = len(names)
plans = permutations(names)
happiness = {}
for plan in plans:
total = 0
for i in range(n):
person = plan[i]
left = plan[(i - 1) % n]
right = plan[(i + 1) % n]
total += d[(person, left)]
total += d[(person, right)]
happiness[plan] = total
return max(happiness.values())
print(get_best_plan(data))
print(get_best_plan(data, extra_name="wim"))
| """
--- Day 13: Knights of the Dinner Table ---
https://adventofcode.com/2015/day/13
"""
from collections import defaultdict
from itertools import permutations
from aocd import data
def parsed(data, extra_name=None):
d = defaultdict(int)
names = {extra_name} - {None}
for line in data.splitlines():
words = line.split()
name0 = words[0]
name1 = words[-1].rstrip(".")
n = {"gain": 1, "lose": -1}[words[2]] * int(words[3])
d[(name0, name1)] = n
names |= {name0, name1}
return names, d
def get_best_plan(data, extra_name=None):
names, d = parsed(data, extra_name)
n = len(names)
plans = permutations(names)
happiness = {}
for plan in plans:
total = 0
for i in range(n):
person = plan[i]
left = plan[(i - 1) % n]
right = plan[(i + 1) % n]
total += d[(person, left)]
total += d[(person, right)]
happiness[plan] = total
return max(happiness.values())
print(get_best_plan(data))
print(get_best_plan(data, extra_name="wim"))
| en | 0.868216 | --- Day 13: Knights of the Dinner Table --- https://adventofcode.com/2015/day/13 | 3.637962 | 4 |
dataset.py | JackyWang2001/COGS118B | 2 | 6619084 | import torch
import os
import glob
import numpy as np
from torch import nn
from PIL import Image
from torch.utils.data import Dataset
from torchvision import transforms
from utils import *
def round_nearest(x, p):
return ((x - 1) // p + 1) * p
class ADE20K_Dataset(Dataset):
"""
ADE20K dataset with anime style transfer
"""
def __init__(self, root, transform=None, status="Train"):
super(ADE20K_Dataset, self).__init__()
self.root = root
self.transform = transform
self.label_downsample_rate = 256 / 21
if status == "Train":
self.dir = os.path.join(self.root, "images", "training")
self.styles = ("Hayao", "Hosoda")
else:
self.dir = os.path.join(self.root, "images", "validation")
self.styles = ("Hayao", "Shinkai")
self.classes_path = concat_subfolder(get_subfolder(self.dir))
# save mask as a dict because one mask corresponds to two images
self.images_path, self.masks_path = [], {}
for folder in self.classes_path:
for style in self.styles:
style = "*_" + style + ".jpg"
for img in glob.glob(os.path.join(folder, style)):
self.images_path.append(img)
for mask in glob.glob(os.path.join(folder, "*_seg.png")):
img_name = mask.split("/")[-1].replace("_seg.png", "")
self.masks_path[img_name] = mask
def __getitem__(self, index):
"""
get image and mask,
:param index:
:return:
"""
img_path = self.images_path[index]
img_name = img_path.split("/")[-1]
for style in self.styles:
style = "_" + style + ".jpg"
img_name = img_name.replace(style, "")
mask_path = self.masks_path[img_name]
img = Image.open(img_path).convert("RGB")
mask = Image.open(mask_path).convert("L")
# resize mask into the same dim with CartoonGAN outputs
mask = mask.resize((224, 224), resample=Image.NEAREST)
# convert mask img into labels
mask = np.array(mask) // self.label_downsample_rate
mask = torch.from_numpy(mask).int().long()
# apply transform
if self.transform is None:
self.transform = transforms.Compose([
transforms.Resize((224, 224)),
transforms.ToTensor(),
# compute normalizing terms in prepare_dataset.py
transforms.Normalize(mean=[0.5169, 0.4734, 0.4078], std=[0.2075, 0.2059, 0.1907])
])
img = self.transform(img)
return img, mask
def __len__(self):
return len(self.images_path)
| import torch
import os
import glob
import numpy as np
from torch import nn
from PIL import Image
from torch.utils.data import Dataset
from torchvision import transforms
from utils import *
def round_nearest(x, p):
return ((x - 1) // p + 1) * p
class ADE20K_Dataset(Dataset):
"""
ADE20K dataset with anime style transfer
"""
def __init__(self, root, transform=None, status="Train"):
super(ADE20K_Dataset, self).__init__()
self.root = root
self.transform = transform
self.label_downsample_rate = 256 / 21
if status == "Train":
self.dir = os.path.join(self.root, "images", "training")
self.styles = ("Hayao", "Hosoda")
else:
self.dir = os.path.join(self.root, "images", "validation")
self.styles = ("Hayao", "Shinkai")
self.classes_path = concat_subfolder(get_subfolder(self.dir))
# save mask as a dict because one mask corresponds to two images
self.images_path, self.masks_path = [], {}
for folder in self.classes_path:
for style in self.styles:
style = "*_" + style + ".jpg"
for img in glob.glob(os.path.join(folder, style)):
self.images_path.append(img)
for mask in glob.glob(os.path.join(folder, "*_seg.png")):
img_name = mask.split("/")[-1].replace("_seg.png", "")
self.masks_path[img_name] = mask
def __getitem__(self, index):
"""
get image and mask,
:param index:
:return:
"""
img_path = self.images_path[index]
img_name = img_path.split("/")[-1]
for style in self.styles:
style = "_" + style + ".jpg"
img_name = img_name.replace(style, "")
mask_path = self.masks_path[img_name]
img = Image.open(img_path).convert("RGB")
mask = Image.open(mask_path).convert("L")
# resize mask into the same dim with CartoonGAN outputs
mask = mask.resize((224, 224), resample=Image.NEAREST)
# convert mask img into labels
mask = np.array(mask) // self.label_downsample_rate
mask = torch.from_numpy(mask).int().long()
# apply transform
if self.transform is None:
self.transform = transforms.Compose([
transforms.Resize((224, 224)),
transforms.ToTensor(),
# compute normalizing terms in prepare_dataset.py
transforms.Normalize(mean=[0.5169, 0.4734, 0.4078], std=[0.2075, 0.2059, 0.1907])
])
img = self.transform(img)
return img, mask
def __len__(self):
return len(self.images_path)
| en | 0.727076 | ADE20K dataset with anime style transfer # save mask as a dict because one mask corresponds to two images get image and mask, :param index: :return: # resize mask into the same dim with CartoonGAN outputs # convert mask img into labels # apply transform # compute normalizing terms in prepare_dataset.py | 2.564476 | 3 |
intime_sdk/intime/send_messages.py | njnur/InTime-Python-SDK | 0 | 6619085 | from typing import Optional, List
from intime_sdk.core.base import APIConnector
from intime_sdk.intime import constants
class Messages(APIConnector):
"""
Class for manipulating SMS functionalities including creating & sending SMS, removing a scheduled sms etc.
"""
def send_sms(self, sender_title: str, message_body: str, recipients: List, group_id: str,
is_flash_msg: bool = False, is_multi_sms: bool = False, send_date: Optional[str] = None,
status_url: Optional[str] = None, check_block_list: Optional[bool] = False,
encrypt_msg: Optional[bool] = False, trans_id: Optional[List] = None,
):
"""
Method for crating and sending sms.
:param sender_title: (String) Sender of the message. Use maximum 11 alphanumeric characters or 15 numeric chars.
Allowed alphanumeric chars is aA-zZ, 0-9, space, .(dot), -(binding char), or +(plus).
Only English characters are allowed.
:param message_body: (String) Message text.
SMS is allowed to contain a maximum of 160 characters. Multi-SMS is allowed to contain a maximum of
804 characters (6 SMS with 134 characters each).
:param recipients: (List) Phone number of the recipients to receive the message. At least one recipient or group
must be addressed.
:param group_id: (String) The groups Identification number.
:param is_flash_msg: (Boolean | Default: False) If this parameter is set to True, method will send a Flash
message (maximum 160 characters) and otherwise it will send a Normal message.
:param is_multi_sms: (Boolean | Default: False) If this parameter is set to True, method will send send up to
six SMS or 804 characters and otherwise it will send a Normal message.
:param send_date: (Optional; String | Default: None) Date/time for scheduled sending, If the message is to be
sent directly/right now, ignore this parameter. (Format: 2012-01-13T14:41:00)
:param status_url: (Optional; String | Default: None) URL for the server to send message status callbacks.
:param check_block_list: (Boolean | Default: False) If this parameter is set to True, method will check
the user's block list for group mailings.
:param encrypt_msg: (Boolean | Default: False) Flag that indicates whether the message should be encrypted after
it's sent. Sent messages cannot be read in Messenger.
:param trans_id: (Optional; String | Default: None) Value to send to the status url with the callback, this
could be a uniq id generated for each recipient and used in callback to link the status callback.
This is mandatory if status_url is used!
:return: API response in dictionary format with a status param
Format::: {
"status": True/False,
"data": "Sample Response/3545354535"
}
"""
recipients_list = """"""
if status_url:
if trans_id:
if len(trans_id) == len(recipients):
for inc in range(0, len(recipients)):
recipients_list += constants.RECIPIENT_XML.format(
recipient=recipients[inc],
trans_id=trans_id[inc]
)
else:
raise ValueError("Each recipients should have a trans_id for Status CallBack")
else:
raise ValueError("trans_id is needed for Status CallBack")
else:
for recipient in recipients:
recipients_list += constants.RECIPIENT_XML.format(
recipient=recipient,
trans_id=''
)
xml_data = constants.SEND_SMS_XML.format(
is_flash_msg="1" if is_flash_msg else "0",
is_multi_sms="1" if is_multi_sms else "0",
send_date=constants.SEND_DATE_XML.format(send_date=send_date) if send_date else '',
sender_title=sender_title,
message_body=message_body,
status_url=constants.STATUS_URL_XML.format(status_url=status_url) if status_url else '',
check_block_list="1" if check_block_list else "0",
encrypt_msg="1" if encrypt_msg else "0",
recipients=recipients_list,
group_id=group_id
)
return self._post(api_url=constants.SEND_SMS_URI,
data=xml_data.encode())
| from typing import Optional, List
from intime_sdk.core.base import APIConnector
from intime_sdk.intime import constants
class Messages(APIConnector):
"""
Class for manipulating SMS functionalities including creating & sending SMS, removing a scheduled sms etc.
"""
def send_sms(self, sender_title: str, message_body: str, recipients: List, group_id: str,
is_flash_msg: bool = False, is_multi_sms: bool = False, send_date: Optional[str] = None,
status_url: Optional[str] = None, check_block_list: Optional[bool] = False,
encrypt_msg: Optional[bool] = False, trans_id: Optional[List] = None,
):
"""
Method for crating and sending sms.
:param sender_title: (String) Sender of the message. Use maximum 11 alphanumeric characters or 15 numeric chars.
Allowed alphanumeric chars is aA-zZ, 0-9, space, .(dot), -(binding char), or +(plus).
Only English characters are allowed.
:param message_body: (String) Message text.
SMS is allowed to contain a maximum of 160 characters. Multi-SMS is allowed to contain a maximum of
804 characters (6 SMS with 134 characters each).
:param recipients: (List) Phone number of the recipients to receive the message. At least one recipient or group
must be addressed.
:param group_id: (String) The groups Identification number.
:param is_flash_msg: (Boolean | Default: False) If this parameter is set to True, method will send a Flash
message (maximum 160 characters) and otherwise it will send a Normal message.
:param is_multi_sms: (Boolean | Default: False) If this parameter is set to True, method will send send up to
six SMS or 804 characters and otherwise it will send a Normal message.
:param send_date: (Optional; String | Default: None) Date/time for scheduled sending, If the message is to be
sent directly/right now, ignore this parameter. (Format: 2012-01-13T14:41:00)
:param status_url: (Optional; String | Default: None) URL for the server to send message status callbacks.
:param check_block_list: (Boolean | Default: False) If this parameter is set to True, method will check
the user's block list for group mailings.
:param encrypt_msg: (Boolean | Default: False) Flag that indicates whether the message should be encrypted after
it's sent. Sent messages cannot be read in Messenger.
:param trans_id: (Optional; String | Default: None) Value to send to the status url with the callback, this
could be a uniq id generated for each recipient and used in callback to link the status callback.
This is mandatory if status_url is used!
:return: API response in dictionary format with a status param
Format::: {
"status": True/False,
"data": "Sample Response/3545354535"
}
"""
recipients_list = """"""
if status_url:
if trans_id:
if len(trans_id) == len(recipients):
for inc in range(0, len(recipients)):
recipients_list += constants.RECIPIENT_XML.format(
recipient=recipients[inc],
trans_id=trans_id[inc]
)
else:
raise ValueError("Each recipients should have a trans_id for Status CallBack")
else:
raise ValueError("trans_id is needed for Status CallBack")
else:
for recipient in recipients:
recipients_list += constants.RECIPIENT_XML.format(
recipient=recipient,
trans_id=''
)
xml_data = constants.SEND_SMS_XML.format(
is_flash_msg="1" if is_flash_msg else "0",
is_multi_sms="1" if is_multi_sms else "0",
send_date=constants.SEND_DATE_XML.format(send_date=send_date) if send_date else '',
sender_title=sender_title,
message_body=message_body,
status_url=constants.STATUS_URL_XML.format(status_url=status_url) if status_url else '',
check_block_list="1" if check_block_list else "0",
encrypt_msg="1" if encrypt_msg else "0",
recipients=recipients_list,
group_id=group_id
)
return self._post(api_url=constants.SEND_SMS_URI,
data=xml_data.encode())
| en | 0.679723 | Class for manipulating SMS functionalities including creating & sending SMS, removing a scheduled sms etc. Method for crating and sending sms. :param sender_title: (String) Sender of the message. Use maximum 11 alphanumeric characters or 15 numeric chars. Allowed alphanumeric chars is aA-zZ, 0-9, space, .(dot), -(binding char), or +(plus). Only English characters are allowed. :param message_body: (String) Message text. SMS is allowed to contain a maximum of 160 characters. Multi-SMS is allowed to contain a maximum of 804 characters (6 SMS with 134 characters each). :param recipients: (List) Phone number of the recipients to receive the message. At least one recipient or group must be addressed. :param group_id: (String) The groups Identification number. :param is_flash_msg: (Boolean | Default: False) If this parameter is set to True, method will send a Flash message (maximum 160 characters) and otherwise it will send a Normal message. :param is_multi_sms: (Boolean | Default: False) If this parameter is set to True, method will send send up to six SMS or 804 characters and otherwise it will send a Normal message. :param send_date: (Optional; String | Default: None) Date/time for scheduled sending, If the message is to be sent directly/right now, ignore this parameter. (Format: 2012-01-13T14:41:00) :param status_url: (Optional; String | Default: None) URL for the server to send message status callbacks. :param check_block_list: (Boolean | Default: False) If this parameter is set to True, method will check the user's block list for group mailings. :param encrypt_msg: (Boolean | Default: False) Flag that indicates whether the message should be encrypted after it's sent. Sent messages cannot be read in Messenger. :param trans_id: (Optional; String | Default: None) Value to send to the status url with the callback, this could be a uniq id generated for each recipient and used in callback to link the status callback. 
This is mandatory if status_url is used! :return: API response in dictionary format with a status param Format::: { "status": True/False, "data": "Sample Response/3545354535" } | 2.681077 | 3 |
apps/news/migrations/0001_initial.py | deniskrumko/dendynotdead | 0 | 6619086 | <reponame>deniskrumko/dendynotdead<gh_stars>0
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-01-27 16:58
from __future__ import unicode_literals
import apps.core.models
import autoslug.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
('music', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='News',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('is_active', models.BooleanField(default=True, verbose_name='Is active')),
('title', models.CharField(max_length=255, null=True, verbose_name='Title')),
('slug', autoslug.fields.AutoSlugField(editable=False, populate_from='title', unique_with=('title',), verbose_name='Slug')),
('preview', models.TextField(blank=True, null=True, verbose_name='Preview')),
('full_text', models.TextField(blank=True, null=True, verbose_name='Full text')),
('image', models.ImageField(blank=True, null=True, upload_to=apps.core.models.BaseModel.file_upload_path, verbose_name='Image')),
('views', models.PositiveIntegerField(default=0, verbose_name='Views')),
('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='news_author', to=settings.AUTH_USER_MODEL, verbose_name='Author')),
('tracks', models.ManyToManyField(blank=True, related_name='news', to='music.Track', verbose_name='Tracks')),
],
options={
'verbose_name': 'News',
'verbose_name_plural': 'News',
},
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-01-27 16:58
from __future__ import unicode_literals
import apps.core.models
import autoslug.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
('music', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='News',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('is_active', models.BooleanField(default=True, verbose_name='Is active')),
('title', models.CharField(max_length=255, null=True, verbose_name='Title')),
('slug', autoslug.fields.AutoSlugField(editable=False, populate_from='title', unique_with=('title',), verbose_name='Slug')),
('preview', models.TextField(blank=True, null=True, verbose_name='Preview')),
('full_text', models.TextField(blank=True, null=True, verbose_name='Full text')),
('image', models.ImageField(blank=True, null=True, upload_to=apps.core.models.BaseModel.file_upload_path, verbose_name='Image')),
('views', models.PositiveIntegerField(default=0, verbose_name='Views')),
('author', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='news_author', to=settings.AUTH_USER_MODEL, verbose_name='Author')),
('tracks', models.ManyToManyField(blank=True, related_name='news', to='music.Track', verbose_name='Tracks')),
],
options={
'verbose_name': 'News',
'verbose_name_plural': 'News',
},
),
] | en | 0.7115 | # -*- coding: utf-8 -*- # Generated by Django 1.11.7 on 2018-01-27 16:58 | 1.704522 | 2 |
make_readme.py | HKUNLP/diagrams_toolkit | 0 | 6619087 | <gh_stars>0
import os
import importlib

import matplotlib.pyplot as plt

URL = "https://github.com/HKUNLP/diagrams_toolkit"
README_STR = """# diagrams_toolkit
Source code for diagrams in the paper of HKU NLPers.
## Usage:
Search for the picture shown below which fits your needs, get into the code, download the code and adapt for your needs(e.g. change the value and color, make the generated figure in pdf format)
"""
PAPERS_DIR = './papers'

# Collect one entry per plotting script found under PAPERS_DIR.
all_plots = []
for paper_dir in os.listdir(PAPERS_DIR):
    # Each directory is one paper; files at this level (README.md etc.) are
    # skipped.  The path must be joined with PAPERS_DIR: the bare name would
    # be resolved against the current working directory instead.
    if os.path.isfile(os.path.join(PAPERS_DIR, paper_dir)):
        continue
    for plot_dir in os.listdir(os.path.join(PAPERS_DIR, paper_dir)):
        if plot_dir.endswith(".py"):
            # For python, call the module's plot() and get the fig/table path.
            figure_path = importlib.import_module("papers.{}.{}".format(paper_dir, plot_dir.split('.')[0])).plot()
            plt.close()  # Close the figure to avoid affecting the next picture.
            all_plots.append({"figure_path": "{}/blob/main/papers/{}/{}".format(URL, paper_dir, figure_path),
                              "code_path": "{}/blob/main/papers/{}/{}".format(URL, paper_dir, plot_dir),
                              "paper": paper_dir})
        elif plot_dir.endswith(".tex"):
            # TODO: For tex, compile it and get the fig/table path.
            pass
        else:
            pass

# Render the collected entries as a markdown table appended to the README text.
HEADER = "| Figure | Code Source | Paper |\n| ---- | ---- | ---- |"
ROWS = []
for plot_info in all_plots:
    ROWS.append(
        """| <a href="{}"> <img src="{}" width="300" /></a> | [code]({}) | {} |""".format(plot_info['figure_path'],
                                                                                          plot_info['figure_path'],
                                                                                          plot_info['code_path'],
                                                                                          plot_info['paper']))
print("{}\n\n{}\n{}".format(README_STR, HEADER, '\n'.join(ROWS)))
| import os
import importlib
import matplotlib.pyplot as plt
URL = "https://github.com/HKUNLP/diagrams_toolkit"
README_STR = """# diagrams_toolkit
Source code for diagrams in the paper of HKU NLPers.
## Usage:
Search for the picture shown below which fits your needs, get into the code, download the code and adapt for your needs(e.g. change the value and color, make the generated figure in pdf format)
"""
PAPERS_DIR = './papers'
all_plots = []
for paper_dir in os.listdir(PAPERS_DIR):
# each dir is a paper,a nd this function will
if os.path.isfile(paper_dir):
# skip the README.md and other non-dir-format items
continue
for plot_dir in os.listdir(os.path.join(PAPERS_DIR, paper_dir)):
if plot_dir.endswith(".py"):
# For python, call plot function and get the fig/table path.
figure_path = importlib.import_module("papers.{}.{}".format(paper_dir, plot_dir.split('.')[0])).plot()
plt.close() # CLose the plt to avoid affecting next picture.
all_plots.append({"figure_path": "{}/blob/main/papers/{}/{}".format(URL, paper_dir, figure_path),
"code_path": "{}/blob/main/papers/{}/{}".format(URL, paper_dir, plot_dir),
"paper": paper_dir})
elif plot_dir.endswith(".tex"):
# TODO: For tex, compile it and get the fig/table path.
pass
else:
pass
HEADER = "| Figure | Code Source | Paper |\n| ---- | ---- | ---- |"
ROWS = []
for plot_info in all_plots:
ROWS.append(
"""| <a href="{}"> <img src="{}" width="300" /></a> | [code]({}) | {} |""".format(plot_info['figure_path'],
plot_info['figure_path'],
plot_info['code_path'],
plot_info['paper']))
print("{}\n\n{}\n{}".format(README_STR, HEADER, '\n'.join(ROWS))) | en | 0.711678 | # diagrams_toolkit Source code for diagrams in the paper of HKU NLPers. ## Usage: Search for the picture shown below which fits your needs, get into the code, download the code and adapt for your needs(e.g. change the value and color, make the generated figure in pdf format) # each dir is a paper,a nd this function will # skip the README.md and other non-dir-format items # For python, call plot function and get the fig/table path. # CLose the plt to avoid affecting next picture. # TODO: For tex, compile it and get the fig/table path. | <a href="{}"> <img src="{}" width="300" /></a> | [code]({}) | {} | | 2.79695 | 3 |
fairseq/data/truncate_last_element_dataset.py | Usstasikus/fact-check-summarization | 41 | 6619088 | <reponame>Usstasikus/fact-check-summarization<gh_stars>10-100
# Copyright (c) Facebook, Inc. and its affiliates.
# Modifications Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import numpy as np
from . import BaseWrapperDataset
class TruncateLastElementDataset(BaseWrapperDataset):
    """Dataset wrapper whose items are the wrapped items with the final
    element dropped.

    Items are expected to expose ``.size(0)`` and support slicing
    (presumably torch tensors — TODO confirm against callers).  Empty items
    are returned unchanged.
    """

    def __init__(self, dataset):
        super().__init__(dataset)
        # NOTE(review): likely redundant — BaseWrapperDataset typically
        # stores `dataset` itself; confirm before removing.
        self.dataset = dataset

    def __getitem__(self, index):
        """Return item `index` without its last element (if non-empty)."""
        item = self.dataset[index]
        if item.size(0) > 0:
            item = item[:-1]
        return item
| # Copyright (c) Facebook, Inc. and its affiliates.
# Modifications Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import numpy as np
from . import BaseWrapperDataset
class TruncateLastElementDataset(BaseWrapperDataset):
def __init__(self, dataset):
super().__init__(dataset)
self.dataset = dataset
def __getitem__(self, index):
item = self.dataset[index]
item_len = item.size(0)
if item_len > 0:
item = item[:-1]
return item
# @property
# def sizes(self):
# return np.minimum(self.dataset.sizes, self.truncation_length)
# def __len__(self):
# return len(self.dataset) | en | 0.752618 | # Copyright (c) Facebook, Inc. and its affiliates. # Modifications Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # @property # def sizes(self): # return np.minimum(self.dataset.sizes, self.truncation_length) # def __len__(self): # return len(self.dataset) | 2.376317 | 2 |
people/datasets/generic.py | dluvizon/3d-pose-consensus | 5 | 6619089 | <reponame>dluvizon/3d-pose-consensus
import os
import numpy as np
from ..utils import *
def project_gt_poses_to_anchors(poses, anchors):
    """Project normalized poses to the normalized anchors coordinates.

    If a single pose is given it is replicated for every anchor.

    # Parameters
        poses: single pose (num_joints, dim+1), which will be replicated for
            every anchor, or one pose per anchor (num_anchors, num_joints, dim+1).
        anchors: reference anchors array (num_anchors, 4), rows as
            (x_min, y_min, x_max, y_max) per the normalisation below.

    # Return
        Projected poses to anchors (num_anchors, num_joints, dim+1).

    NOTE(review): when `poses` is 3-D and contiguous, np.reshape returns a
    view, so the x/y normalisation below writes through to the caller's
    array — confirm callers expect in-place mutation.
    """
    assert poses.ndim in [2, 3] and anchors.ndim == 2, \
        'Invalid dimensions for poses {} and/or anchors {}'.format(
            poses.shape, anchors.shape)
    num_anchors = anchors.shape[0]
    dim = poses.shape[-1] - 1
    if poses.ndim == 3:
        assert poses.shape[0] == num_anchors, \
            'Incompatible num_anchors on pose {}'.format(poses.shape[0])
        num_joints = poses.shape[1]
    else:
        # Single pose: replicate it once per anchor.
        num_joints = poses.shape[0]
        poses = np.expand_dims(poses, axis=0)
        poses = np.tile(poses, (num_anchors, 1, 1))
    # Flatten (anchor, joint) pairs so the normalisation is one vector op.
    poses = np.reshape(poses, (num_anchors * num_joints, -1))
    anchors = np.expand_dims(anchors, axis=1)
    anchors = np.tile(anchors, (1, num_joints, 1))
    anchors = np.reshape(anchors, (num_anchors * num_joints, -1))
    # Map x/y into each anchor's local [0, 1] frame.
    poses[:, 0:2] = (poses[:, 0:2] - anchors[:, 0:2]) \
        / (anchors[:, 2:4] - anchors[:, 0:2])
    # Refresh the visibility flag (last column) for joints whose projected
    # location is defined; NaNs keep their previous flag.
    vis = get_visible_joints(poses[:, :2])
    poses[:, dim] = np.where(np.isnan(vis), poses[:, dim], vis)
    poses = np.reshape(poses, (num_anchors, num_joints, -1))
    return poses
def inverse_project_2dposes_from_anchors(poses, anchors):
    """Project an array of normalized 2D poses on the anchors coordinates to
    the image crop normalized coordinates.

    # Parameters
        poses: poses array (num_anchors, num_joints, dim+1)
        anchors: reference anchors array (num_anchors, 4)

    # Return
        Inverse projected poses (num_anchors, num_joints, dim+1)

    NOTE(review): for a contiguous float input, np.reshape yields a view, so
    the x/y de-normalisation mutates the caller's array in place.
    """
    assert poses.ndim == 3 and anchors.ndim == 2 and anchors.shape[-1] == 4, \
        'Invalid dimensions for pose {} and/or anchors {}'.format(
            poses.shape, anchors.shape)
    num_anchors, num_joints = poses.shape[0:2]
    # Flatten (anchor, joint) pairs and broadcast each anchor to its joints.
    poses = np.reshape(poses, (num_anchors * num_joints, -1))
    anchors = np.expand_dims(anchors, axis=1)
    anchors = np.tile(anchors, (1, num_joints, 1))
    anchors = np.reshape(anchors, (num_anchors * num_joints, -1))
    # Map x/y from each anchor's local [0, 1] frame back to crop coordinates.
    poses[:, 0:2] = poses[:, 0:2] * (anchors[:, 2:4] - anchors[:, 0:2]) \
        + anchors[:, 0:2]
    poses = np.reshape(poses, (num_anchors, num_joints, -1))
    return poses
def compute_anchors_reference(anchors, afmat, imsize):
    """Compute the anchor references (`aref` field), based on anchors, afmat,
    and on the absolute image size (img_w, img_h).

    Returns an array (num_anchors, 4) of (xc, yc, w, h) per anchor,
    normalised by the image size.
    """
    # Start from each anchor's two opposite corners in anchor-local coords.
    aux = np.zeros((len(anchors), 2, 2))
    aux[:, 0, :] = 0.  # corners (0, 0)
    aux[:, 1, :] = 1.  # corners (1, 1)
    # Map the corners: anchor frame -> crop frame -> original image frame.
    aux = inverse_project_2dposes_from_anchors(aux, anchors)
    aux = transform_pose_sequence(afmat, aux, inverse=True)
    # Center and extent of each anchor in image coordinates.
    xc = np.mean(aux[:, :, 0], axis=-1, keepdims=True)
    yc = np.mean(aux[:, :, 1], axis=-1, keepdims=True)
    wchc = np.abs(aux[:, 1, :] - aux[:, 0, :])
    aref = np.concatenate([xc, yc, wchc], axis=-1)
    # Normalise x-like columns by width, y-like columns by height.
    aref[:, 0::2] /= imsize[0]
    aref[:, 1::2] /= imsize[1]
    return aref
def compute_window_reference(afmat, imsize):
    """Compute the reference (xc, yc, w, h) of the crop window described by
    `afmat`, normalised by the absolute image size (img_w, img_h).
    """
    # Two opposite corners of the crop window in normalised crop coords.
    aux = np.zeros((2, 2))
    aux[0, :] = 0.  # corners (0, 0)
    aux[1, :] = 1.  # corners (1, 1)
    # Map the corners back to original-image coordinates.
    aux = transform_2d_points(afmat, aux, transpose=True, inverse=True)
    xc = np.mean(aux[:, 0], axis=-1, keepdims=True)
    yc = np.mean(aux[:, 1], axis=-1, keepdims=True)
    wchc = np.abs(aux[1, :] - aux[0, :])
    aref = np.concatenate([xc, yc, wchc], axis=-1)
    # Normalise x-like entries by width, y-like entries by height.
    aref[0::2] /= imsize[0]
    aref[1::2] /= imsize[1]
    return aref
class GenericDataset(object):
    """Generic implementation for a dataset class.

    Plain configuration holder: stores the constructor arguments for use by
    concrete dataset subclasses.
    """

    def __init__(self,
                 dataset_path,
                 dataconf,
                 poselayout,
                 remove_outer_joints,
                 preprocess_mode):
        # Root path of the dataset on disk.
        self.dataset_path = dataset_path
        # Data configuration object (augmentation / crop settings).
        self.dataconf = dataconf
        # Pose layout describing the joint set used.
        self.poselayout = poselayout
        # Whether joints outside the crop window should be removed.
        self.remove_outer_joints = remove_outer_joints
        # Image preprocessing mode identifier.
        self.preprocess_mode = preprocess_mode
| import os
import numpy as np
from ..utils import *
def project_gt_poses_to_anchors(poses, anchors):
"""Project normalized poses to the normalized anchors coordinates.
If multiple poses are given, use the closest pose for each anchor.
# Parameters
poses: single pose (num_joints, dim+1), which will be replicated for
every anchor, one pose per anchor (num_anchors, num_joints, dim+1).
anchors: reference anchors array (num_anchors, 4).
# Return
Projected poses to anchors (num_anchors, num_joints, dim+1).
"""
assert poses.ndim in [2, 3] and anchors.ndim == 2, \
'Invalid dimensions for poses {} and/or anchors {}'.format(
poses.shape, anchors.shape)
num_anchors = anchors.shape[0]
dim = poses.shape[-1] - 1
if poses.ndim == 3:
assert poses.shape[0] == num_anchors, \
'Incompatible num_anchors on pose {}'.format(poses.shape[0])
num_joints = poses.shape[1]
else:
num_joints = poses.shape[0]
poses = np.expand_dims(poses, axis=0)
poses = np.tile(poses, (num_anchors, 1, 1))
poses = np.reshape(poses, (num_anchors * num_joints, -1))
anchors = np.expand_dims(anchors, axis=1)
anchors = np.tile(anchors, (1, num_joints, 1))
anchors = np.reshape(anchors, (num_anchors * num_joints, -1))
poses[:, 0:2] = (poses[:, 0:2] - anchors[:, 0:2]) \
/ (anchors[:, 2:4] - anchors[:, 0:2])
vis = get_visible_joints(poses[:, :2])
poses[:, dim] = np.where(np.isnan(vis), poses[:, dim], vis)
poses = np.reshape(poses, (num_anchors, num_joints, -1))
return poses
def inverse_project_2dposes_from_anchors(poses, anchors):
"""Project an array of normalized 2D poses on the anchors coordinates to
the image crop normalized coordinates.
# Parameters
poses: poses array (num_anchors, num_joints, dim+1)
anchors: reference anchors array (num_anchors, 4)
# Return
Inverse projected poses (num_anchors, num_joints, dim+1)
"""
assert poses.ndim == 3 and anchors.ndim == 2 and anchors.shape[-1] == 4, \
'Invalid dimensions for pose {} and/or anchors {}'.format(
poses.shape, anchors.shape)
num_anchors, num_joints = poses.shape[0:2]
poses = np.reshape(poses, (num_anchors * num_joints, -1))
anchors = np.expand_dims(anchors, axis=1)
anchors = np.tile(anchors, (1, num_joints, 1))
anchors = np.reshape(anchors, (num_anchors * num_joints, -1))
poses[:, 0:2] = poses[:, 0:2]*(anchors[:, 2:4] - anchors[:, 0:2]) \
+ anchors[:, 0:2]
poses = np.reshape(poses, (num_anchors, num_joints, -1))
return poses
def compute_anchors_reference(anchors, afmat, imsize):
"""Compute the anchor references (`aref` field), based on anchors, afmat,
and ont the absolute image size (img_w, img_h).
"""
aux = np.zeros((len(anchors), 2, 2))
aux[:, 0, :] = 0. # corners (0, 0)
aux[:, 1, :] = 1. # corners (1, 1)
aux = inverse_project_2dposes_from_anchors(aux, anchors)
aux = transform_pose_sequence(afmat, aux, inverse=True)
xc = np.mean(aux[:, :, 0], axis=-1, keepdims=True)
yc = np.mean(aux[:, :, 1], axis=-1, keepdims=True)
wchc = np.abs(aux[:, 1, :] - aux[:, 0, :])
aref = np.concatenate([xc, yc, wchc], axis=-1)
aref[:, 0::2] /= imsize[0]
aref[:, 1::2] /= imsize[1]
return aref
def compute_window_reference(afmat, imsize):
aux = np.zeros((2, 2))
aux[0, :] = 0. # corners (0, 0)
aux[1, :] = 1. # corners (1, 1)
aux = transform_2d_points(afmat, aux, transpose=True, inverse=True)
xc = np.mean(aux[:, 0], axis=-1, keepdims=True)
yc = np.mean(aux[:, 1], axis=-1, keepdims=True)
wchc = np.abs(aux[1, :] - aux[0, :])
aref = np.concatenate([xc, yc, wchc], axis=-1)
aref[0::2] /= imsize[0]
aref[1::2] /= imsize[1]
return aref
class GenericDataset(object):
"""Generic implementation for a dataset class.
"""
def __init__(self,
dataset_path,
dataconf,
poselayout,
remove_outer_joints,
preprocess_mode):
self.dataset_path = dataset_path
self.dataconf = dataconf
self.poselayout = poselayout
self.remove_outer_joints = remove_outer_joints
self.preprocess_mode = preprocess_mode | en | 0.693007 | Project normalized poses to the normalized anchors coordinates. If multiple poses are given, use the closest pose for each anchor. # Parameters poses: single pose (num_joints, dim+1), which will be replicated for every anchor, one pose per anchor (num_anchors, num_joints, dim+1). anchors: reference anchors array (num_anchors, 4). # Return Projected poses to anchors (num_anchors, num_joints, dim+1). Project an array of normalized 2D poses on the anchors coordinates to the image crop normalized coordinates. # Parameters poses: poses array (num_anchors, num_joints, dim+1) anchors: reference anchors array (num_anchors, 4) # Return Inverse projected poses (num_anchors, num_joints, dim+1) Compute the anchor references (`aref` field), based on anchors, afmat, and ont the absolute image size (img_w, img_h). # corners (0, 0) # corners (1, 1) # corners (0, 0) # corners (1, 1) Generic implementation for a dataset class. | 2.58708 | 3 |
Flask/Blog/post/forms.py | LieonShelly/PythonFun | 0 | 6619090 | from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, SubmitField, BooleanField, TextField
from wtforms.validators import DataRequired, Length, Email, EqualTo, ValidationError
from flask_wtf.file import FileAllowed, FileField
class PostForm(FlaskForm):
    """Form for creating or editing a blog post (title + content)."""

    title = StringField('Title', validators=[DataRequired()])
    # NOTE(review): TextField is a deprecated alias removed in WTForms 3;
    # switch to StringField/TextAreaField when upgrading the dependency.
    content = TextField('Content', validators=[DataRequired()])
    submit = SubmitField('Post')
| from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, SubmitField, BooleanField, TextField
from wtforms.validators import DataRequired, Length, Email, EqualTo, ValidationError
from flask_wtf.file import FileAllowed, FileField
class PostForm(FlaskForm):
title = StringField('Title', validators=[DataRequired()])
content = TextField('Content',validators=[DataRequired()])
submit = SubmitField('Post')
| none | 1 | 2.621645 | 3 | |
input/tokenizer.py | maxim5/code-inspector | 5 | 6619091 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'maxim'
import enum
import string
def tokenize_by_char(text):
    """Return *text* split into a list of its individual characters."""
    return list(text)
def tokenize_by_lexems(text):
    """Yield *text* split into lexems.

    Runs of identifier characters (letters, digits, '_') and runs of
    punctuation form single tokens; each whitespace character (' ', '\\t',
    '\\n') is yielded as its own one-character token.

    Fix: an empty input previously yielded [''] because the final guard
    allowed prev == -1 (text[-1:] == '').
    """
    SPACE = 0
    PUNCTUATION = 1
    IDENTIFIER = 2
    punctuation_chars = set(string.punctuation) - set('_')
    identifier_chars = set(string.ascii_letters + string.digits + '_')
    state = SPACE
    prev = -1  # start index of the token currently being accumulated
    for i, ch in enumerate(text):
        is_punctuation = ch in punctuation_chars
        is_identifier = ch in identifier_chars
        is_space = ch == ' ' or ch == '\t' or ch == '\n'
        if state == SPACE:
            # Whitespace tokens are emitted one character at a time.
            if prev >= 0:
                yield text[prev:i]
            prev = i
        if state == PUNCTUATION:
            # A punctuation run ends when a non-punctuation char arrives.
            if is_identifier or is_space:
                yield text[prev:i]
                prev = i
        if state == IDENTIFIER:
            # An identifier run ends when a non-identifier char arrives.
            if is_punctuation or is_space:
                yield text[prev:i]
                prev = i
        if is_space:
            state = SPACE
        elif is_punctuation:
            state = PUNCTUATION
        elif is_identifier:
            state = IDENTIFIER
    # Flush the trailing token; prev == -1 only for empty input.
    if 0 <= prev < len(text):
        yield text[prev:]
class Mode(enum.Enum):
    """Tokenization granularity selector."""
    BY_CHAR = 0   # one token per character
    BY_LEXEM = 1  # tokens are identifier/punctuation runs and single spaces
def tokenize(text, mode=Mode.BY_LEXEM):
    """Tokenize *text* according to *mode*.

    Returns a generator of lexems for Mode.BY_LEXEM (the default) and a
    list of characters otherwise.
    """
    if mode == Mode.BY_LEXEM:
        return tokenize_by_lexems(text)
    else:
        return tokenize_by_char(text)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'maxim'
import enum
import string
def tokenize_by_char(text):
return [ch for ch in text]
def tokenize_by_lexems(text):
SPACE = 0
PUNCTUATION = 1
IDENTIFIER = 2
punctuation_chars = set(string.punctuation) - set('_')
identifier_chars = set(string.ascii_letters + string.digits + '_')
state = SPACE
prev = -1
for i, ch in enumerate(text):
is_punctuation = ch in punctuation_chars
is_identifier = ch in identifier_chars
is_space = ch == ' ' or ch == '\t' or ch == '\n'
if state == SPACE:
if prev >= 0:
yield text[prev:i]
prev = i
if state == PUNCTUATION:
if is_identifier or is_space:
yield text[prev:i]
prev = i
if state == IDENTIFIER:
if is_punctuation or is_space:
yield text[prev:i]
prev = i
if is_space:
state = SPACE
elif is_punctuation:
state = PUNCTUATION
elif is_identifier:
state = IDENTIFIER
if prev < len(text):
yield text[prev:]
class Mode(enum.Enum):
BY_CHAR = 0
BY_LEXEM = 1
def tokenize(text, mode=Mode.BY_LEXEM):
if mode == Mode.BY_LEXEM:
return tokenize_by_lexems(text)
else:
return tokenize_by_char(text)
| en | 0.352855 | #!/usr/bin/env python # -*- coding: utf-8 -*- | 3.402038 | 3 |
dynamics/apis/__init__.py | fylein/ms-dynamics-business-central-sdk-py | 6 | 6619092 | from .vendors import Vendors
from .accounts import Accounts
from .invoices import PurchaseInvoices
from .invoice_line_items import PurchaseInvoiceLineItems
from .attachments import Attachments
__all__ = [
'Vendors',
'Accounts',
'PurchaseInvoices',
'PurchaseInvoiceLineItems',
'Attachments'
]
| from .vendors import Vendors
from .accounts import Accounts
from .invoices import PurchaseInvoices
from .invoice_line_items import PurchaseInvoiceLineItems
from .attachments import Attachments
__all__ = [
'Vendors',
'Accounts',
'PurchaseInvoices',
'PurchaseInvoiceLineItems',
'Attachments'
]
| none | 1 | 1.257396 | 1 | |
img_encryption.py | codehack9991/Information-Security | 2 | 6619093 | # -*- coding: utf-8 -*-
"""
Created on Mon Apr 1 10:25:30 2019
"""
from PIL import Image
from Crypto.Cipher import DES
filename = "hp.bmp"
format = "BMP"
filenameout = "hp1"
key = "<KEY>"
def pad(data):
    """Zero-pad *data* (bytes) up to a multiple of 8 bytes (DES block size).

    Note: when len(data) is already a multiple of 8, a full extra block of
    8 zero bytes is appended (8 - 0 == 8); this matches the original
    behaviour and keeps ciphertext lengths consistent.
    """
    return data + b"\x00" * (8 - len(data) % 8)
def convert_to_RGB(data):
    """Regroup a flat byte/int sequence into a tuple of (r, g, b) triples.

    Equivalent to the original index-filter implementation but a single
    O(n) pass over three strided slices; a trailing partial triple (when
    len(data) is not a multiple of 3) is dropped by zip in both versions.
    """
    return tuple(zip(data[0::3], data[1::3], data[2::3]))
def process_image(filename):
    """Encrypt the RGB pixel data of *filename* with DES and save the result.

    Writes two images: "<filenameout>_cbc.<format>" and
    "<filenameout>_ecb.<format>", illustrating how ECB preserves visible
    structure while CBC does not.  Relies on the module-level globals
    `key`, `filenameout` and `format` (the latter shadows the builtin).
    """
    im = Image.open(filename)
    data = im.convert("RGB").tobytes()
    original = len(data)  # keep only this many bytes after padding
    # CBC-encrypted copy.
    new = convert_to_RGB(des_cbc_encrypt(key, pad(data))[:original])
    im2 = Image.new(im.mode, im.size)
    im2.putdata(new)
    im2.save(filenameout + "_cbc." + format, format)
    # ECB-encrypted copy.
    new = convert_to_RGB(des_ecb_encrypt(key, pad(data))[:original])
    im2 = Image.new(im.mode, im.size)
    im2.putdata(new)
    im2.save(filenameout + "_ecb." + format, format)
# CBC
def des_cbc_encrypt(key, data):
    """Encrypt *data* (length must be a multiple of 8) with DES in CBC mode.

    The chaining is done manually: each 8-byte block is XORed with the
    previous ciphertext block (initial IV = b"AAAAAAAA") before a
    single-block DES encryption.  The fixed IV is fine for this demo but
    insecure for real use.
    """
    IV = b"A" * 8
    des = DES.new(key)
    # Accumulate ciphertext blocks in a list: bytes += in a loop is O(n^2).
    blocks = []
    for off in range(0, len(data), 8):
        IV = bytes(x ^ y for x, y in zip(IV, data[off:off + 8]))
        IV = des.encrypt(IV)
        blocks.append(IV)
    return b"".join(blocks)
# ECB
def des_ecb_encrypt(key, data):
    """Encrypt *data* (length must be a multiple of 8) with DES in ECB mode."""
    des = DES.new(key)
    return des.encrypt(data)
process_image(filename) | # -*- coding: utf-8 -*-
"""
Created on Mon Apr 1 10:25:30 2019
"""
from PIL import Image
from Crypto.Cipher import DES
filename = "hp.bmp"
format = "BMP"
filenameout = "hp1"
key = "<KEY>"
def pad(data):
return data + b"\x00"*(8-len(data)%8)
def convert_to_RGB(data):
r, g, b = tuple(map(lambda d: [data[i] for i in range(0,len(data)) if i % 3 == d], [0, 1, 2]))
pixels = tuple(zip(r,g,b))
return pixels
def process_image(filename):
im = Image.open(filename)
data = im.convert("RGB").tobytes()
original = len(data)
new = convert_to_RGB(des_cbc_encrypt(key, pad(data))[:original])
im2 = Image.new(im.mode, im.size)
im2.putdata(new)
im2.save(filenameout+"_cbc."+format, format)
new = convert_to_RGB(des_ecb_encrypt(key, pad(data))[:original])
im2 = Image.new(im.mode, im.size)
im2.putdata(new)
im2.save(filenameout+"_ecb."+format, format)
# CBC
def des_cbc_encrypt(key, data):
IV = b"A"*8
des = DES.new(key)
i=0
new_data=b""
while i<len(data):
IV = bytes(x ^ y for x, y in zip(IV, data[i:i+8]))
IV = des.encrypt(IV)
new_data = new_data+IV
i=i+8
return new_data
# ECB
def des_ecb_encrypt(key, data):
des = DES.new(key)
new_data = des.encrypt(data)
return new_data
process_image(filename) | en | 0.754989 | # -*- coding: utf-8 -*- Created on Mon Apr 1 10:25:30 2019 # CBC # ECB | 3.255025 | 3 |
naivepyrunner/worker.py | henningjanssen/naivepyrunner | 0 | 6619094 | <gh_stars>0
from time import sleep, time
from collections import deque
from queue import Queue
# For DedicatedWorker
from threading import Lock, Timer
from .task import Task
class Worker(object):
    """Worker loop that drains due jobs from a queue and requeues them.

    NOTE(review): `step()` calls `queue.pop_if_due()` / `tasks.insert()`,
    which stdlib `queue.Queue` (the default) does not provide — callers
    appear expected to pass the project's own queue type; confirm.
    """

    def __init__(self, queue=None, tasks=None, *args, **kwargs):
        self.running = False
        self.queue = queue if queue else Queue()
        self.tasks = tasks if tasks else Queue()

    def run(self):
        """Process due jobs until stop(); idle-sleep 0.1s when none are due."""
        self.running = True
        while self.running:
            if self.step():
                continue
            sleep(0.1)

    def step(self):
        """Pop one due job and execute it.

        A truthy result from `job.execute()` puts the job into `tasks`.
        Returns True if a job was popped, False otherwise.
        """
        job = self.queue.pop_if_due()
        if job and job.execute():
            self.tasks.insert(job)
        return bool(job)

    def stop(self):
        """Signal the run() loop to exit."""
        self.running = False
class DedicatedWorker(Worker):
    """Worker bound to a single repeating task.

    The task's `execute()` returns the number of seconds until its next
    run; a negative value stops the worker.
    """

    def __init__(self, task, *args, **kwargs):
        self.task = task
        self.running = False
        self.next_execution = time()  # run immediately on start

    def run(self):
        """Run the task on schedule until it asks to stop (or stop() is called)."""
        self.running = True
        while self.running:
            if self.next_execution <= time():
                if not self.step():
                    self.running = False
                continue
            sleep(0.1)

    def step(self):
        """Execute the task once and reschedule it.

        Returns False (stop) when execute() yields a negative delay,
        True otherwise.
        """
        sleep_time = self.task.execute()
        if sleep_time < 0:
            return False
        self.next_execution = time() + sleep_time
        return True

    def stop(self):
        """Stop the loop and forward the stop signal to the task."""
        self.running = False
        self.task.stop()
| from time import sleep, time
from collections import deque
from queue import Queue
# For DedicatedWorker
from threading import Lock, Timer
from .task import Task
class Worker(object):
def __init__(self, queue=None, tasks=None, *args, **kwargs):
self.running = False
self.queue = queue if queue else Queue()
self.tasks = tasks if tasks else Queue()
def run(self):
self.running = True
while self.running:
if self.step():
continue
sleep(0.1)
def step(self):
job = self.queue.pop_if_due()
if job and job.execute():
self.tasks.insert(job)
return bool(job)
def stop(self):
self.running = False
class DedicatedWorker(Worker):
def __init__(self, task, *args, **kwargs):
self.task = task
self.running = False
self.next_execution = time()
def run(self):
self.running = True
while self.running:
if self.next_execution <= time():
if not self.step():
self.running = False
continue
sleep(0.1)
def step(self):
sleep_time = self.task.execute()
if sleep_time < 0:
return False
self.next_execution = time() + sleep_time
return True
def stop(self):
self.running = False
self.task.stop() | en | 0.426871 | # For DedicatedWorker | 3.133569 | 3 |
fadilah-batch001/apps/home.py | imfdlh/supermarket-gross-income-dashboard | 0 | 6619095 | <reponame>imfdlh/supermarket-gross-income-dashboard
import dash_html_components as html
import dash_bootstrap_components as dbc
layout = html.Div([
dbc.Container([
dbc.Row(
dbc.Col(
html.H1(
"Welcome to Supermarket Sales Dashboard!",
className="text-center"
),
className="mb-5 mt-5"
)
),
dbc.Row([
dbc.Col([
html.H5(
'This dashboard contains analysis of supermarket sales in Myanmar.',
),
html.H5(
children=[
'The dataset can be accessed from ',
html.A("here.", href="https://www.kaggle.com/aungpyaeap/supermarket-sales"),
]
),
html.Br(),
html.H5(
children=[
"The analysis is done by ",
html.Strong(
html.A("<NAME>", href="https://github.com/imfdlh",
className="link-nama"),
),
" as Milestone 1 submission for FTDS batch 001 Phase 0."
]
)
],
className="mb-4")
]),
dbc.Row([
dbc.Col(
dbc.Card([
dbc.CardImg(src="/assets/img/github.png", top=True),
dbc.CardBody(
[
html.H4("My Github", className="card-title"),
html.P(
"You can find this project under the link below, "
"also don't forget to visit my github for more repository!",
className="card-text",
),
dbc.Button("Fadilah's Milestone Repo", color="primary", href = "https://github.com/FTDS-001/MilestoneP0/tree/FadilahNurImani"),
]
),
],
style={"width": "18rem"},
),
className = "mb-5 mt-5"
),
dbc.Col(
dbc.Card([
dbc.CardImg(src="/assets/img/visualization.png", top=True),
dbc.CardBody(
[
html.H4("Visualization", className="card-title"),
html.P(
"To view visualization of supermarket sales data, visit the button below.",
className="card-text",
),
dbc.Button("Visualization", color="info", href = '/apps/visualization'),
]
),
],
style={"width": "18rem"},
),
className = "mb-5 mt-5"
),
dbc.Col(
dbc.Card([
dbc.CardImg(src="/assets/img/hypothesis.png", top=True),
dbc.CardBody(
[
html.H4("Hypothesis", className="card-title"),
html.P(
"To view hypothesis of supermarket sales data, visit the button below.",
className="card-text",
),
dbc.Button("Hypothesis", color="info", href = '/apps/hypothesis'),
]
),
],
style={"width": "18rem"},
),
className = "mb-5 mt-5"
)
])
])
]) | import dash_html_components as html
import dash_bootstrap_components as dbc
layout = html.Div([
dbc.Container([
dbc.Row(
dbc.Col(
html.H1(
"Welcome to Supermarket Sales Dashboard!",
className="text-center"
),
className="mb-5 mt-5"
)
),
dbc.Row([
dbc.Col([
html.H5(
'This dashboard contains analysis of supermarket sales in Myanmar.',
),
html.H5(
children=[
'The dataset can be accessed from ',
html.A("here.", href="https://www.kaggle.com/aungpyaeap/supermarket-sales"),
]
),
html.Br(),
html.H5(
children=[
"The analysis is done by ",
html.Strong(
html.A("<NAME>", href="https://github.com/imfdlh",
className="link-nama"),
),
" as Milestone 1 submission for FTDS batch 001 Phase 0."
]
)
],
className="mb-4")
]),
dbc.Row([
dbc.Col(
dbc.Card([
dbc.CardImg(src="/assets/img/github.png", top=True),
dbc.CardBody(
[
html.H4("My Github", className="card-title"),
html.P(
"You can find this project under the link below, "
"also don't forget to visit my github for more repository!",
className="card-text",
),
dbc.Button("Fadilah's Milestone Repo", color="primary", href = "https://github.com/FTDS-001/MilestoneP0/tree/FadilahNurImani"),
]
),
],
style={"width": "18rem"},
),
className = "mb-5 mt-5"
),
dbc.Col(
dbc.Card([
dbc.CardImg(src="/assets/img/visualization.png", top=True),
dbc.CardBody(
[
html.H4("Visualization", className="card-title"),
html.P(
"To view visualization of supermarket sales data, visit the button below.",
className="card-text",
),
dbc.Button("Visualization", color="info", href = '/apps/visualization'),
]
),
],
style={"width": "18rem"},
),
className = "mb-5 mt-5"
),
dbc.Col(
dbc.Card([
dbc.CardImg(src="/assets/img/hypothesis.png", top=True),
dbc.CardBody(
[
html.H4("Hypothesis", className="card-title"),
html.P(
"To view hypothesis of supermarket sales data, visit the button below.",
className="card-text",
),
dbc.Button("Hypothesis", color="info", href = '/apps/hypothesis'),
]
),
],
style={"width": "18rem"},
),
className = "mb-5 mt-5"
)
])
])
]) | none | 1 | 2.541317 | 3 | |
src/geo_testing/test_scripts/test_big.py | hpgl/hpgl | 70 | 6619096 | #
#
# Copyright 2009 HPGL Team
# This file is part of HPGL (High Perfomance Geostatistics Library).
# HPGL is free software: you can redistribute it and/or modify it under the terms of the BSD License.
# You should have received a copy of the BSD License along with HPGL.
#
# NOTE(review): Python 2 script (print statements).  Smoke-test that runs the
# HPGL kriging / simulation routines on a medium-sized test grid and writes
# the results under results/.  Individual stages can be skipped by adding
# "nook", "nosk", "nolvm", "nosgs", "noik" or "nosis" to the command line.
from geo import *
from sys import *
import time
import os

# Make sure the output directories exist before anything is written.
if not os.path.exists("results/"):
    os.mkdir("results/")
if not os.path.exists("results/medium/"):
    os.mkdir("results/medium/")

print "loading double property..."
# -99 is the "undefined value" marker used by the HPGL test data files.
prop = load_cont_property("test_data/BIG_SOFT_DATA_160_141_20.INC", -99)
prop_cont = load_cont_property("test_data/BIG_SOFT_DATA_CON_160_141_20.INC", -99)
print "done"

# NOTE(review): the grid X size is 166 while the data file names say 160 --
# presumably intentional for this data set; confirm against the .INC headers.
grid = SugarboxGrid(166, 141, 20)

# --- ordinary kriging --------------------------------------------------------
if (not "nook" in argv):
    # while True:
    prop2 = ordinary_kriging(prop_cont, grid,
        radiuses = (20, 20, 20),
        max_neighbours = 12,
        covariance_type = covariance.exponential,
        ranges = (10, 10, 10),
        sill = 1)
    write_property(prop2, "results/RESULT_OK_BIG.INC", "BIG_OK", -99)
    del(prop2)

# --- simple kriging ----------------------------------------------------------
if (not "nosk" in argv):
    prop3 = simple_kriging(prop_cont, grid,
        radiuses = (20, 20, 20),
        max_neighbours = 12,
        covariance_type = covariance.exponential,
        ranges = (10, 10, 10),
        sill = 1,
        mean = 1.6)
    write_property(prop3, "results/RESULT_SK_BIG.INC", "BIG_SK", -99)
    del(prop3)

# --- kriging with a locally varying mean (LVM) -------------------------------
if (not "nolvm" in argv):
    mean_data = load_mean_data("test_data/mean_0.487_166_141_20.inc")
    prop_lvm = lvm_kriging(prop_cont, grid, mean_data,
        radiuses = (20, 20, 20),
        max_neighbours = 12,
        covariance_type = covariance.exponential,
        ranges = (10, 10, 10),
        sill = 1)
    write_property(prop_lvm, "results/medium/lvm.inc", "lvm_medium", -99)
    del(mean_data)
    del(prop_lvm)

# --- sequential Gaussian simulation ------------------------------------------
if (not "nosgs" in argv):
    sgs_params = {
        "radiuses": (20, 20, 20),
        "max_neighbours": 12,
        "covariance_type": covariance.exponential,
        "ranges": (10, 10, 10),
        "sill": 0.4,
        "kriging_type": "sk"}
    mean_data = load_mean_data("test_data/mean_0.487_166_141_20.inc")
    # Same parameters, but driven by a locally varying mean instead of SK.
    lvm_sgs_params = {
        "radiuses": (20, 20, 20),
        "max_neighbours": 12,
        "covariance_type": covariance.exponential,
        "ranges": (10, 10, 10),
        "sill": 0.4,
        "mean_data": mean_data}
    # Two realisations with different seeds, plus one LVM realisation.
    sgs_result1 = sgs_simulation(prop_cont, grid, seed=3439275, **sgs_params)
    write_property(sgs_result1, "results/medium/SGS1.INC", "SGS1", -99)
    sgs_result2 = sgs_simulation(prop_cont, grid, seed=24193421, **sgs_params)
    write_property(sgs_result2, "results/medium/SGS2.INC", "SGS2", -99)
    sgs_lvm = sgs_simulation(prop_cont, grid, seed=3439275, **lvm_sgs_params)
    write_property(sgs_lvm, "results/medium/SGS_LVM.INC", "SGS_LVM", -99)
    # NOTE(review): "RESULt" casing below looks like a typo in the output file
    # name; left as-is because changing it would alter the script's output.
    write_property(sgs_result2, "results/RESULt_SGS2_BIG.INC", "BIG_SGS2", -99)
    del(sgs_result1)
    del(sgs_result2)
    del(sgs_lvm)

# --- indicator kriging -------------------------------------------------------
# Two-indicator data set with marginal probabilities 0.8 / 0.2.
ik_prop = load_ind_property("test_data/BIG_SOFT_DATA_160_141_20.INC", -99, [0,1])
print 'Property data loaded.'
ik_data = [ {
    "cov_type": 1,
    "ranges": (10, 10, 10),
    "sill": 1,
    "radiuses": (20, 20, 20),
    "max_neighbours": 12,
    "marginal_prob": 0.8,
    "value": 0
},
{
    "cov_type": 1,
    "ranges": (10, 10, 10),
    "sill": 1,
    "radiuses": (20, 20, 20),
    "max_neighbours": 12,
    "marginal_prob": 0.2,
    "value": 1
}]
# Four-indicator variant; defined here but not used by the stages below.
multi_ik_data = [
    {
        "cov_type": 0,
        "ranges": (4, 4, 4),
        "sill": 0.25,
        "radiuses": (20, 20, 20),
        "max_neighbours": 12,
        "marginal_prob": 0.24,
        "value": 0
    },
    {
        "cov_type": 0,
        "ranges": (6, 6, 6),
        "sill": 0.25,
        "radiuses": (20, 20, 20),
        "max_neighbours": 12,
        "marginal_prob": 0.235,
        "value": 1
    },
    {
        "cov_type": 0,
        "ranges": (2, 2, 2),
        "sill": 0.25,
        "radiuses": (20, 20, 20),
        "max_neighbours": 12,
        "marginal_prob": 0.34,
        "value": 2
    },
    {
        "cov_type": 0,
        "ranges": (10, 10, 10),
        "sill": 0.25,
        "radiuses": (20, 20, 20),
        "max_neighbours": 12,
        "marginal_prob": 0.18,
        "value": 3
    }]
if (not "noik" in argv):
    # Full indicator kriging, timed.
    t1 = time.time()
    ik_result = indicator_kriging(ik_prop, grid, ik_data)
    t2 = time.time()
    print "Time: %s s." % (t2 - t1)
    write_property(ik_result, "results/RESULT_IK_BIG_SOFT.INC", "BIG_IK", -99)
    # Median indicator kriging over the same two indicators, timed.
    t1 = time.time()
    ik_result = median_ik(ik_prop, grid, values=(0,1), marginal_probs = (0.8, 0.2), radiuses = (20, 20, 20), max_neighbours = 12, covariance_type=1, ranges=(10, 10, 10), sill = 1)
    t2 = time.time()
    print "Time: %s s." % (t2 - t1)
    write_property(ik_result, "results/RESULT_MIK_BIG_SOFT.INC", "BIG_MIK", -99)

# --- sequential indicator simulation -----------------------------------------
if (not "nosis" in argv):
    sis_result = sis_simulation(ik_prop, grid, ik_data, seed=3241347)
    write_property(sis_result, "results/RESULT_SIS_BIG_SOFT.INC", "SIS_RESULT_BIG_SOFT", -99)

# Keep the console window open until the user presses Enter.
stdin.readline()
| #
#
# Copyright 2009 HPGL Team
# This file is part of HPGL (High Perfomance Geostatistics Library).
# HPGL is free software: you can redistribute it and/or modify it under the terms of the BSD License.
# You should have received a copy of the BSD License along with HPGL.
#
# NOTE(review): Python 2 script (print statements).  Smoke-test that runs the
# HPGL kriging / simulation routines on a medium-sized test grid and writes
# the results under results/.  Individual stages can be skipped by adding
# "nook", "nosk", "nolvm", "nosgs", "noik" or "nosis" to the command line.
from geo import *
from sys import *
import time
import os

# Make sure the output directories exist before anything is written.
if not os.path.exists("results/"):
    os.mkdir("results/")
if not os.path.exists("results/medium/"):
    os.mkdir("results/medium/")

print "loading double property..."
# -99 is the "undefined value" marker used by the HPGL test data files.
prop = load_cont_property("test_data/BIG_SOFT_DATA_160_141_20.INC", -99)
prop_cont = load_cont_property("test_data/BIG_SOFT_DATA_CON_160_141_20.INC", -99)
print "done"

# NOTE(review): the grid X size is 166 while the data file names say 160 --
# presumably intentional for this data set; confirm against the .INC headers.
grid = SugarboxGrid(166, 141, 20)

# --- ordinary kriging --------------------------------------------------------
if (not "nook" in argv):
    # while True:
    prop2 = ordinary_kriging(prop_cont, grid,
        radiuses = (20, 20, 20),
        max_neighbours = 12,
        covariance_type = covariance.exponential,
        ranges = (10, 10, 10),
        sill = 1)
    write_property(prop2, "results/RESULT_OK_BIG.INC", "BIG_OK", -99)
    del(prop2)

# --- simple kriging ----------------------------------------------------------
if (not "nosk" in argv):
    prop3 = simple_kriging(prop_cont, grid,
        radiuses = (20, 20, 20),
        max_neighbours = 12,
        covariance_type = covariance.exponential,
        ranges = (10, 10, 10),
        sill = 1,
        mean = 1.6)
    write_property(prop3, "results/RESULT_SK_BIG.INC", "BIG_SK", -99)
    del(prop3)

# --- kriging with a locally varying mean (LVM) -------------------------------
if (not "nolvm" in argv):
    mean_data = load_mean_data("test_data/mean_0.487_166_141_20.inc")
    prop_lvm = lvm_kriging(prop_cont, grid, mean_data,
        radiuses = (20, 20, 20),
        max_neighbours = 12,
        covariance_type = covariance.exponential,
        ranges = (10, 10, 10),
        sill = 1)
    write_property(prop_lvm, "results/medium/lvm.inc", "lvm_medium", -99)
    del(mean_data)
    del(prop_lvm)

# --- sequential Gaussian simulation ------------------------------------------
if (not "nosgs" in argv):
    sgs_params = {
        "radiuses": (20, 20, 20),
        "max_neighbours": 12,
        "covariance_type": covariance.exponential,
        "ranges": (10, 10, 10),
        "sill": 0.4,
        "kriging_type": "sk"}
    mean_data = load_mean_data("test_data/mean_0.487_166_141_20.inc")
    # Same parameters, but driven by a locally varying mean instead of SK.
    lvm_sgs_params = {
        "radiuses": (20, 20, 20),
        "max_neighbours": 12,
        "covariance_type": covariance.exponential,
        "ranges": (10, 10, 10),
        "sill": 0.4,
        "mean_data": mean_data}
    # Two realisations with different seeds, plus one LVM realisation.
    sgs_result1 = sgs_simulation(prop_cont, grid, seed=3439275, **sgs_params)
    write_property(sgs_result1, "results/medium/SGS1.INC", "SGS1", -99)
    sgs_result2 = sgs_simulation(prop_cont, grid, seed=24193421, **sgs_params)
    write_property(sgs_result2, "results/medium/SGS2.INC", "SGS2", -99)
    sgs_lvm = sgs_simulation(prop_cont, grid, seed=3439275, **lvm_sgs_params)
    write_property(sgs_lvm, "results/medium/SGS_LVM.INC", "SGS_LVM", -99)
    # NOTE(review): "RESULt" casing below looks like a typo in the output file
    # name; left as-is because changing it would alter the script's output.
    write_property(sgs_result2, "results/RESULt_SGS2_BIG.INC", "BIG_SGS2", -99)
    del(sgs_result1)
    del(sgs_result2)
    del(sgs_lvm)

# --- indicator kriging -------------------------------------------------------
# Two-indicator data set with marginal probabilities 0.8 / 0.2.
ik_prop = load_ind_property("test_data/BIG_SOFT_DATA_160_141_20.INC", -99, [0,1])
print 'Property data loaded.'
ik_data = [ {
    "cov_type": 1,
    "ranges": (10, 10, 10),
    "sill": 1,
    "radiuses": (20, 20, 20),
    "max_neighbours": 12,
    "marginal_prob": 0.8,
    "value": 0
},
{
    "cov_type": 1,
    "ranges": (10, 10, 10),
    "sill": 1,
    "radiuses": (20, 20, 20),
    "max_neighbours": 12,
    "marginal_prob": 0.2,
    "value": 1
}]
# Four-indicator variant; defined here but not used by the stages below.
multi_ik_data = [
    {
        "cov_type": 0,
        "ranges": (4, 4, 4),
        "sill": 0.25,
        "radiuses": (20, 20, 20),
        "max_neighbours": 12,
        "marginal_prob": 0.24,
        "value": 0
    },
    {
        "cov_type": 0,
        "ranges": (6, 6, 6),
        "sill": 0.25,
        "radiuses": (20, 20, 20),
        "max_neighbours": 12,
        "marginal_prob": 0.235,
        "value": 1
    },
    {
        "cov_type": 0,
        "ranges": (2, 2, 2),
        "sill": 0.25,
        "radiuses": (20, 20, 20),
        "max_neighbours": 12,
        "marginal_prob": 0.34,
        "value": 2
    },
    {
        "cov_type": 0,
        "ranges": (10, 10, 10),
        "sill": 0.25,
        "radiuses": (20, 20, 20),
        "max_neighbours": 12,
        "marginal_prob": 0.18,
        "value": 3
    }]
if (not "noik" in argv):
    # Full indicator kriging, timed.
    t1 = time.time()
    ik_result = indicator_kriging(ik_prop, grid, ik_data)
    t2 = time.time()
    print "Time: %s s." % (t2 - t1)
    write_property(ik_result, "results/RESULT_IK_BIG_SOFT.INC", "BIG_IK", -99)
    # Median indicator kriging over the same two indicators, timed.
    t1 = time.time()
    ik_result = median_ik(ik_prop, grid, values=(0,1), marginal_probs = (0.8, 0.2), radiuses = (20, 20, 20), max_neighbours = 12, covariance_type=1, ranges=(10, 10, 10), sill = 1)
    t2 = time.time()
    print "Time: %s s." % (t2 - t1)
    write_property(ik_result, "results/RESULT_MIK_BIG_SOFT.INC", "BIG_MIK", -99)

# --- sequential indicator simulation -----------------------------------------
if (not "nosis" in argv):
    sis_result = sis_simulation(ik_prop, grid, ik_data, seed=3241347)
    write_property(sis_result, "results/RESULT_SIS_BIG_SOFT.INC", "SIS_RESULT_BIG_SOFT", -99)

# Keep the console window open until the user presses Enter.
stdin.readline()
| en | 0.918556 | # # Copyright 2009 HPGL Team # This file is part of HPGL (High Perfomance Geostatistics Library). # HPGL is free software: you can redistribute it and/or modify it under the terms of the BSD License. # You should have received a copy of the BSD License along with HPGL. # # while True: | 1.667114 | 2 |
presentations/session_4/graph_db_part_1/example_code/redis_graph_py3/site_configuration_py3.py | NanoDataCenter/nano_data_center | 2 | 6619097 | import json
class Retrieve_Site_ID( object ):
    """Load the site configuration JSON stored under the given directory."""

    def __init__(self, path=""):
        # Directory containing site_configuration.json ("" means current dir).
        self.path = path

    def read_configuration(self):
        """Return the parsed contents of <path>/site_configuration.json."""
        # BUG FIX: use a context manager so the file handle is always closed,
        # even if json.load() raises; the original leaked the open handle.
        with open(self.path + "/site_configuration.json", "r") as config_file:
            return json.load(config_file)


if __name__ == "__main__":
    retrive_class = Retrieve_Site_ID("redis_graph_py3")
    print( retrive_class.read_configuration())
| import json
class Retrieve_Site_ID( object ):
    """Load the site configuration JSON stored under the given directory."""

    def __init__(self, path=""):
        # Directory containing site_configuration.json ("" means current dir).
        self.path = path

    def read_configuration(self):
        """Return the parsed contents of <path>/site_configuration.json."""
        # BUG FIX: use a context manager so the file handle is always closed,
        # even if json.load() raises; the original leaked the open handle.
        with open(self.path + "/site_configuration.json", "r") as config_file:
            return json.load(config_file)


if __name__ == "__main__":
    retrive_class = Retrieve_Site_ID("redis_graph_py3")
    print( retrive_class.read_configuration())
| none | 1 | 2.703681 | 3 | |
Kinkajou/python/views/weixin.py | app858216291-github/Kinkajou-shop | 0 | 6619098 | <gh_stars>0
# -*- coding: utf-8 -*-
import requests
from flask import Blueprint, render_template, redirect,request,jsonify,send_from_directory,url_for,send_file,session
from model.modelBase import Jsonfy
import setting
import xmltodict
import time
import json
from wechatpy.utils import random_string
# Blueprint for all WeChat-related routes; registered by the application.
wx = Blueprint('wx',__name__)
# Upload-image directory; not referenced elsewhere in this file's visible code.
IMAGE_FOLDER = 'static/upload/'
@wx.route('/index', methods=['POST','GET'])
def index():
    """Return the JS-SDK config (appid/timestamp/nonceStr/signature) for the H5 page."""
    openid = request.args.get('openid')
    from wechatpy.client import WeChatClient
    # t=WeChatClient.jsapi.get_ticket()
    # signature=WeChatClient.jsapi.get_jsapi_signature()
    timestamp = int(time.time())
    nonce = random_string(32)
    client = WeChatClient(appid=setting.WeinXin.APP_ID, secret=setting.WeinXin.APP_SECRECT)
    ticket = client.jsapi.get_jsapi_ticket()
    print(ticket)
    print(nonce)
    print(timestamp)
    signature = client.jsapi.get_jsapi_signature(nonce, ticket, timestamp, 'http://h5.heshihuan.cn/')
    print(signature)
    # signature=wxjspay.get_jsapi_params(openid,price=1.1,orderNum='1203333333')['sign']
    config = {
        'appid': setting.WeinXin.APP_ID,
        'timestamp': timestamp,
        'nonceStr': nonce,
        'signature': signature,
        'url': 'http://www.heshihuan.cn/#/pages/product/product?id=6',
    }
    return Jsonfy(data=config).__str__()
##http://127.0.0.1:5000/wx/mpPay?openid=oo0m04oxLhANWFBwMWezYRRRidzc
@wx.route('/mpPay', methods=['POST','GET'])
def mpPay():
    """Mini-program payment endpoint.

    NOTE(review): the actual unified-order calls are commented out below, so
    the original returned Jsonfy(data=res) with *res* never defined and raised
    a NameError on every request.  Until the payment flow is re-enabled we
    return an empty payload instead of crashing.
    """
    openid = request.args.get('openid')
    # wx=WX_PayToolUtil(APP_ID=setting.WeinXin.MP_APP_ID,MCH_ID=setting.WeinXin.MCH_ID,API_KEY=setting.WeinXin.API_KEY,NOTIFY_URL=setting.WeinXin.NOTIFY_URL)
    # res=wx.getPayUrl(orderid=IOUtil.orderNo(),openid=openid,goodsPrice=1)
    # pay = WeChatPay(appid=setting.WeinXin.MP_APP_ID, api_key=setting.WeinXin.API_KEY,sub_appid=setting.WeinXin.MP_APP_ID, mch_id=setting.WeinXin.MCH_ID)
    # res = pay.order.create(
    #     trade_type="JSAPI",
    #     body="商品描述",
    #     total_fee=1,
    #     notify_url=setting.WeinXin.NOTIFY_URL,
    #     user_id=openid,
    #     out_trade_no=IOUtil.orderNo())
    res = None  # BUG FIX: res was previously undefined (NameError at runtime)
    return Jsonfy(data=res).__str__()
@wx.route('/mpOpenId', methods=['POST','GET'])
def mpOpenId():
    """Exchange a mini-program login code for the user's openid."""
    login_code = request.args.get('code')
    payload = {
        'appid': setting.WeinXin.MP_APP_ID,
        'secret': setting.WeinXin.MP_APP_SECRECT,
        'js_code': login_code,
        'grant_type': 'authorization_code',
    }
    resp = requests.get('https://api.weixin.qq.com/sns/jscode2session', params=payload)
    openid = resp.json().get('openid', '')
    return Jsonfy(data=openid).__str__()
import requests
import hashlib
import xmltodict
import time
import random
import string
class WX_PayToolUtil():
    """ WeChat Pay helper: builds signed unified-order requests for JSAPI payments. """

    def __init__(self, APP_ID, MCH_ID, API_KEY, NOTIFY_URL):
        self._APP_ID = APP_ID          # 小程序ID / mini-program app id
        self._MCH_ID = MCH_ID          # 商户号 / merchant id
        self._API_KEY = API_KEY        # merchant API key used for MD5 signing
        self._UFDODER_URL = "https://api.mch.weixin.qq.com/pay/unifiedorder"  # 接口链接 unified-order endpoint
        self._NOTIFY_URL = NOTIFY_URL  # 异步通知 asynchronous payment-result callback URL

    def generate_sign(self, param):
        '''Return the WeChat MD5 signature for *param* (dict of str -> str).

        Keys are sorted ASCII-ascending, joined as "key=value&" pairs, the
        merchant key is appended, and the upper-cased MD5 hex digest is
        returned, as the WeChat Pay signing algorithm requires.
        '''
        stringA = ''
        ks = sorted(param.keys())
        # 参数排序 -- fields must be in ascending key order
        for k in ks:
            stringA += (k + '=' + param[k] + '&')
        # 拼接商户KEY -- append the merchant key
        stringSignTemp = stringA + "key=" + self._API_KEY
        hash_md5 = hashlib.md5(stringSignTemp.encode('utf8'))
        sign = hash_md5.hexdigest().upper()
        return sign

    def getPayUrl(self, orderid, openid, goodsPrice, **kwargs):
        """Create a unified order and return the signed params for wx.requestPayment.

        Returns None implicitly when WeChat rejects the request
        (return_code / result_code not SUCCESS).
        """
        key = self._API_KEY
        # Random nonce string, < 32 characters.
        # BUG FIX: string.letters is Python 2 only; Python 3 provides
        # string.ascii_letters.
        nonce_str = ''.join(random.sample(string.ascii_letters + string.digits, 30))
        params = {
            'appid': self._APP_ID,            # 小程序ID
            'mch_id': self._MCH_ID,           # 商户号
            'nonce_str': nonce_str,           # 随机字符串
            "body": '测试订单',                # 支付说明
            'out_trade_no': orderid,          # 生成的订单号
            'total_fee': str(goodsPrice),     # 标价金额 (fen)
            'spbill_create_ip': "127.0.0.1",  # mini-programs cannot supply client IP
            'notify_url': self._NOTIFY_URL,
            'trade_type': "JSAPI",            # 支付类型
            "openid": openid,                 # 用户id
        }
        # 生成签名
        params['sign'] = self.generate_sign(params)
        # BUG FIX: the WeChat unified-order API expects an <xml> document
        # root; the previous {'root': params} produced <root> and the request
        # was rejected.
        param = {'xml': params}
        xml = xmltodict.unparse(param)
        response = requests.post(self._UFDODER_URL, data=xml.encode('utf-8'), headers={'Content-Type': 'text/xml'})
        # xml -> dict
        msg = response.text
        xmlmsg = xmltodict.parse(msg)
        # Extract prepay_id on success.
        if xmlmsg['xml']['return_code'] == 'SUCCESS':
            if xmlmsg['xml']['result_code'] == 'SUCCESS':
                prepay_id = xmlmsg['xml']['prepay_id']
                timeStamp = str(int(time.time()))
                # The five fields wx.requestPayment needs, plus paySign below.
                data = {
                    "appId": self._APP_ID,
                    "nonceStr": nonce_str,
                    "package": "prepay_id=" + prepay_id,
                    "signType": 'MD5',
                    "timeStamp": timeStamp,
                }
                paySign = self.generate_sign(data)
                data["paySign"] = paySign
                # Signed parameter set handed to the front end.
                return data
class wxjsconfig:
    """JS-SDK signature helpers (handler-mixin style).

    NOTE(review): this class calls self.get_argument / self.assign /
    self.display / self.getwx / self.accesstokens and self.request, none of
    which are defined here -- presumably supplied by a framework base class;
    verify before use.
    """

    def index(self):
        """
        Knowledge-center page.
        :return:
        """
        id = self.get_argument('id', '')
        getSignPackage = self.getSignPackage()
        self.assign('getSignPackage', getSignPackage)
        self.display('knowledge/index.html')

    def getSignPackage(self):
        # Obtain the jsapi_ticket used to sign the current page URL.
        jsapiTicket = self.getJsApiTicket()
        # NOTE: the URL must be taken from the live request, never hard-coded.
        # (original PHP reference kept for context)
        # protocol = (!empty($_SERVER['HTTPS']) && $_SERVER['HTTPS'] !== 'off' || $_SERVER['SERVER_PORT'] == 443) ? "https://" : "http://";
        # $url = "$protocol$_SERVER[HTTP_HOST]$_SERVER[REQUEST_URI]";
        # Current page URL.
        url = "{}://{}{}".format(self.request.protocol, self.request.host, self.request.uri)
        # Timestamp.
        timestamp = int(time.time())
        # Random nonce string.
        nonceStr = self.createNonceStr()
        # Fields must be ordered by ASCII key name, per the WeChat spec.
        # NOTE(review): '&timestamp=' appears corrupted to '×tamp=' below (an
        # HTML-entity mangling of '&times'); the signature cannot be valid
        # until "...&timestamp={}&url={}" is restored.
        string = "jsapi_ticket={}&noncestr={}×tamp={}&url={}".format(jsapiTicket, nonceStr, timestamp, url)
        # NOTE(review): hashlib.sha1 requires bytes in Python 3 -- this raises
        # TypeError; should be hashlib.sha1(string.encode()).
        signature = hashlib.sha1(string).hexdigest();
        wxinfo = self.getwx()
        signPackage = {
            "appId": wxinfo['appid'],
            "nonceStr": nonceStr,
            "timestamp": timestamp,
            "url": url,
            "signature": signature,
            "rawString": string
        }
        return signPackage;

    def createNonceStr(self, length=16):
        # Build a random nonce string.
        # NOTE(review): the 'length' parameter is ignored (loop fixed at 16),
        # and the slice-based picker may append zero or several characters per
        # round, so the result length is not actually 16.
        import random
        chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
        str = "";
        for i in range(0, 16):
            str += chars[random.randint(0, len(chars) - 1):random.randint(0, len(chars) - 1) + 1]
        # original PHP reference:
        # for ($i = 0; $i < $length; $i++) {
        #     $str .= substr($chars, mt_rand(0, strlen($chars) - 1), 1);
        # }
        return str;

    def getJsApiTicket(self):
        # Fetch the jsapi_ticket; with it, JS-SDK signatures can be produced.
        import urllib.request
        # The jsapi_ticket should be cached globally and refreshed on expiry.
        # cookie('ticket',null);
        # Access token first.
        accessToken = self.accesstokens()
        # For enterprise accounts use this URL instead:
        # $url = "https://qyapi.weixin.qq.com/cgi-bin/get_jsapi_ticket?access_token=$accessToken";
        # Then the jsapi_ticket endpoint:
        url = "https://api.weixin.qq.com/cgi-bin/ticket/getticket?access_token={}&type=jsapi".format(accessToken)
        req = request.Request(url)
        res_data = request.urlopen(req)
        res = res_data.read()
        # NOTE(review): this should almost certainly be json.loads(res);
        # json.dumps() serialises the bytes and the ['ticket'] lookup below
        # will fail.  Also 'request' here resolves to flask.request, not
        # urllib.request, so Request/urlopen raise AttributeError.
        res = json.dumps(res)
        return str(res['ticket'])
##公众号回复和验证 -- official-account verification and message replies
@wx.route('/token',methods=['GET','POST'])
def token():
    """Official-account server-verification / message-reply endpoint.

    NOTE(review): the unconditional 'return echostr' a few lines in makes
    everything after it dead code -- apparently a debugging shortcut that
    bypasses WeChat's signature verification.  The original GET-verification
    and POST message-reply logic is preserved below, unreachable.
    """
    token = '<PASSWORD>'
    echostr = request.args.get('echostr')
    # Echo the challenge straight back (signature check bypassed!).
    return echostr
    #return "8337549056785503916"
    # ---------------- unreachable from here down ----------------
    print("请求进来了")
    import hashlib
    if request.method == 'GET':
        try:
            signature = request.args.get('signature')
            print(signature)
            timestamp = request.args.get('timestamp')
            print(timestamp)
            echostr = request.args.get('echostr')
            nonce = request.args.get('nonce')
            # WeChat GET verification: sha1 over sorted token/timestamp/nonce.
            list = [token, timestamp, nonce]
            list.sort()
            sha1 = hashlib.sha1()
            # NOTE(review): map() is lazy in Python 3, so sha1.update is never
            # actually called here; the strings would also need .encode().
            map(sha1.update, list)
            hashcode = sha1.hexdigest()
            print("handle/GET func: hashcode, signature: ", hashcode, signature)
            if hashcode == signature:
                print("成功")
                return echostr
            else:
                return ""
        except Exception:
            print("异常")
            return ""
    print("1")
    signature = request.args.get('signature')
    print(signature)
    timestamp = request.args.get('timestamp')
    print(timestamp)
    echostr = request.args.get('echostr')
    nonce = request.args.get('nonce')
    print(nonce)
    openid = request.args.get('openid')
    # NOTE(review): the session is keyed by the openid value itself.
    session[request.args.get('openid')] = openid
    print("open获取")
    print(openid)
    if len(request.args)==0:
        return "hello, this is handle view"
    # Second, string-concatenation variant of the signature check.
    list = [token, timestamp, nonce]
    list.sort()
    s = list[0]+list[1]+list[2]
    hashcode = hashlib.sha1(s.encode('utf-8')).hexdigest()
    if hashcode == signature:
        print("请求结束1")
        print(echostr)
        return str(echostr)
    else:
        print('验证失败')
        return "hi"
    print("请求结束")
    ##消息回复 -- message replies
    if request.method=="POST":
        print("open获取Post")
        openid = request.args.get('openid');
        session['openid'] = openid
        print(openid)
        # The WeChat server forwards the user's message as an XML body.
        xml_str = request.data
        if not xml_str:
            return ""
        # Parse the XML payload.
        xml_dict = xmltodict.parse(xml_str)
        xml_dict = xml_dict.get("xml")
        # Message type.
        msg_type = xml_dict.get("MsgType")
        if msg_type == "text":
            # Text message: echo it back to the user via the WeChat server.
            resp_dict = {
                "xml": {
                    "ToUserName": xml_dict.get("FromUserName"),
                    "FromUserName": xml_dict.get("ToUserName"),
                    "CreateTime": int(time.time()),
                    "MsgType": "text",
                    "Content": "you say:" + xml_dict.get("Content")
                }
            }
            # Serialise the dict back to XML.
            resp_xml_str = xmltodict.unparse(resp_dict)
            # Hand the reply back to the WeChat server.
            return resp_xml_str
        else:
            # Any non-text message gets a fixed reply.
            resp_dict = {
                "xml": {
                    "ToUserName": xml_dict.get("FromUserName"),
                    "FromUserName": xml_dict.get("ToUserName"),
                    "CreateTime": int(time.time()),
                    "MsgType": "text",
                    "Content": "Dear I Love you so much"
                }
            }
            resp_xml_str = xmltodict.unparse(resp_dict)
            # Hand the reply back to the WeChat server.
            return resp_xml_str
    # print("2")
| # -*- coding: utf-8 -*-
import requests
from flask import Blueprint, render_template, redirect,request,jsonify,send_from_directory,url_for,send_file,session
from model.modelBase import Jsonfy
import setting
import xmltodict
import time
import json
from wechatpy.utils import random_string
# Blueprint for all WeChat-related routes; registered by the application.
wx = Blueprint('wx',__name__)
# Upload-image directory; not referenced elsewhere in this file's visible code.
IMAGE_FOLDER = 'static/upload/'
@wx.route('/index', methods=['POST','GET'])
def index():
    """Return the JS-SDK config (appid/timestamp/nonceStr/signature) for the H5 page."""
    openid = request.args.get('openid')
    from wechatpy.client import WeChatClient
    # t=WeChatClient.jsapi.get_ticket()
    # signature=WeChatClient.jsapi.get_jsapi_signature()
    timestamp = int(time.time())
    nonce = random_string(32)
    client = WeChatClient(appid=setting.WeinXin.APP_ID, secret=setting.WeinXin.APP_SECRECT)
    ticket = client.jsapi.get_jsapi_ticket()
    print(ticket)
    print(nonce)
    print(timestamp)
    signature = client.jsapi.get_jsapi_signature(nonce, ticket, timestamp, 'http://h5.heshihuan.cn/')
    print(signature)
    # signature=wxjspay.get_jsapi_params(openid,price=1.1,orderNum='1203333333')['sign']
    config = {
        'appid': setting.WeinXin.APP_ID,
        'timestamp': timestamp,
        'nonceStr': nonce,
        'signature': signature,
        'url': 'http://www.heshihuan.cn/#/pages/product/product?id=6',
    }
    return Jsonfy(data=config).__str__()
##http://127.0.0.1:5000/wx/mpPay?openid=oo0m04oxLhANWFBwMWezYRRRidzc
@wx.route('/mpPay', methods=['POST','GET'])
def mpPay():
    """Mini-program payment endpoint.

    NOTE(review): the actual unified-order calls are commented out below, so
    the original returned Jsonfy(data=res) with *res* never defined and raised
    a NameError on every request.  Until the payment flow is re-enabled we
    return an empty payload instead of crashing.
    """
    openid = request.args.get('openid')
    # wx=WX_PayToolUtil(APP_ID=setting.WeinXin.MP_APP_ID,MCH_ID=setting.WeinXin.MCH_ID,API_KEY=setting.WeinXin.API_KEY,NOTIFY_URL=setting.WeinXin.NOTIFY_URL)
    # res=wx.getPayUrl(orderid=IOUtil.orderNo(),openid=openid,goodsPrice=1)
    # pay = WeChatPay(appid=setting.WeinXin.MP_APP_ID, api_key=setting.WeinXin.API_KEY,sub_appid=setting.WeinXin.MP_APP_ID, mch_id=setting.WeinXin.MCH_ID)
    # res = pay.order.create(
    #     trade_type="JSAPI",
    #     body="商品描述",
    #     total_fee=1,
    #     notify_url=setting.WeinXin.NOTIFY_URL,
    #     user_id=openid,
    #     out_trade_no=IOUtil.orderNo())
    res = None  # BUG FIX: res was previously undefined (NameError at runtime)
    return Jsonfy(data=res).__str__()
@wx.route('/mpOpenId', methods=['POST','GET'])
def mpOpenId():
    """Exchange a mini-program login code for the user's openid."""
    login_code = request.args.get('code')
    payload = {
        'appid': setting.WeinXin.MP_APP_ID,
        'secret': setting.WeinXin.MP_APP_SECRECT,
        'js_code': login_code,
        'grant_type': 'authorization_code',
    }
    resp = requests.get('https://api.weixin.qq.com/sns/jscode2session', params=payload)
    openid = resp.json().get('openid', '')
    return Jsonfy(data=openid).__str__()
import requests
import hashlib
import xmltodict
import time
import random
import string
class WX_PayToolUtil():
    """ WeChat Pay helper: builds signed unified-order requests for JSAPI payments. """

    def __init__(self, APP_ID, MCH_ID, API_KEY, NOTIFY_URL):
        self._APP_ID = APP_ID          # 小程序ID / mini-program app id
        self._MCH_ID = MCH_ID          # 商户号 / merchant id
        self._API_KEY = API_KEY        # merchant API key used for MD5 signing
        self._UFDODER_URL = "https://api.mch.weixin.qq.com/pay/unifiedorder"  # 接口链接 unified-order endpoint
        self._NOTIFY_URL = NOTIFY_URL  # 异步通知 asynchronous payment-result callback URL

    def generate_sign(self, param):
        '''Return the WeChat MD5 signature for *param* (dict of str -> str).

        Keys are sorted ASCII-ascending, joined as "key=value&" pairs, the
        merchant key is appended, and the upper-cased MD5 hex digest is
        returned, as the WeChat Pay signing algorithm requires.
        '''
        stringA = ''
        ks = sorted(param.keys())
        # 参数排序 -- fields must be in ascending key order
        for k in ks:
            stringA += (k + '=' + param[k] + '&')
        # 拼接商户KEY -- append the merchant key
        stringSignTemp = stringA + "key=" + self._API_KEY
        hash_md5 = hashlib.md5(stringSignTemp.encode('utf8'))
        sign = hash_md5.hexdigest().upper()
        return sign

    def getPayUrl(self, orderid, openid, goodsPrice, **kwargs):
        """Create a unified order and return the signed params for wx.requestPayment.

        Returns None implicitly when WeChat rejects the request
        (return_code / result_code not SUCCESS).
        """
        key = self._API_KEY
        # Random nonce string, < 32 characters.
        # BUG FIX: string.letters is Python 2 only; Python 3 provides
        # string.ascii_letters.
        nonce_str = ''.join(random.sample(string.ascii_letters + string.digits, 30))
        params = {
            'appid': self._APP_ID,            # 小程序ID
            'mch_id': self._MCH_ID,           # 商户号
            'nonce_str': nonce_str,           # 随机字符串
            "body": '测试订单',                # 支付说明
            'out_trade_no': orderid,          # 生成的订单号
            'total_fee': str(goodsPrice),     # 标价金额 (fen)
            'spbill_create_ip': "127.0.0.1",  # mini-programs cannot supply client IP
            'notify_url': self._NOTIFY_URL,
            'trade_type': "JSAPI",            # 支付类型
            "openid": openid,                 # 用户id
        }
        # 生成签名
        params['sign'] = self.generate_sign(params)
        # BUG FIX: the WeChat unified-order API expects an <xml> document
        # root; the previous {'root': params} produced <root> and the request
        # was rejected.
        param = {'xml': params}
        xml = xmltodict.unparse(param)
        response = requests.post(self._UFDODER_URL, data=xml.encode('utf-8'), headers={'Content-Type': 'text/xml'})
        # xml -> dict
        msg = response.text
        xmlmsg = xmltodict.parse(msg)
        # Extract prepay_id on success.
        if xmlmsg['xml']['return_code'] == 'SUCCESS':
            if xmlmsg['xml']['result_code'] == 'SUCCESS':
                prepay_id = xmlmsg['xml']['prepay_id']
                timeStamp = str(int(time.time()))
                # The five fields wx.requestPayment needs, plus paySign below.
                data = {
                    "appId": self._APP_ID,
                    "nonceStr": nonce_str,
                    "package": "prepay_id=" + prepay_id,
                    "signType": 'MD5',
                    "timeStamp": timeStamp,
                }
                paySign = self.generate_sign(data)
                data["paySign"] = paySign
                # Signed parameter set handed to the front end.
                return data
class wxjsconfig:
    """JS-SDK signature helpers (handler-mixin style).

    NOTE(review): this class calls self.get_argument / self.assign /
    self.display / self.getwx / self.accesstokens and self.request, none of
    which are defined here -- presumably supplied by a framework base class;
    verify before use.
    """

    def index(self):
        """
        Knowledge-center page.
        :return:
        """
        id = self.get_argument('id', '')
        getSignPackage = self.getSignPackage()
        self.assign('getSignPackage', getSignPackage)
        self.display('knowledge/index.html')

    def getSignPackage(self):
        # Obtain the jsapi_ticket used to sign the current page URL.
        jsapiTicket = self.getJsApiTicket()
        # NOTE: the URL must be taken from the live request, never hard-coded.
        # (original PHP reference kept for context)
        # protocol = (!empty($_SERVER['HTTPS']) && $_SERVER['HTTPS'] !== 'off' || $_SERVER['SERVER_PORT'] == 443) ? "https://" : "http://";
        # $url = "$protocol$_SERVER[HTTP_HOST]$_SERVER[REQUEST_URI]";
        # Current page URL.
        url = "{}://{}{}".format(self.request.protocol, self.request.host, self.request.uri)
        # Timestamp.
        timestamp = int(time.time())
        # Random nonce string.
        nonceStr = self.createNonceStr()
        # Fields must be ordered by ASCII key name, per the WeChat spec.
        # NOTE(review): '&timestamp=' appears corrupted to '×tamp=' below (an
        # HTML-entity mangling of '&times'); the signature cannot be valid
        # until "...&timestamp={}&url={}" is restored.
        string = "jsapi_ticket={}&noncestr={}×tamp={}&url={}".format(jsapiTicket, nonceStr, timestamp, url)
        # NOTE(review): hashlib.sha1 requires bytes in Python 3 -- this raises
        # TypeError; should be hashlib.sha1(string.encode()).
        signature = hashlib.sha1(string).hexdigest();
        wxinfo = self.getwx()
        signPackage = {
            "appId": wxinfo['appid'],
            "nonceStr": nonceStr,
            "timestamp": timestamp,
            "url": url,
            "signature": signature,
            "rawString": string
        }
        return signPackage;

    def createNonceStr(self, length=16):
        # Build a random nonce string.
        # NOTE(review): the 'length' parameter is ignored (loop fixed at 16),
        # and the slice-based picker may append zero or several characters per
        # round, so the result length is not actually 16.
        import random
        chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
        str = "";
        for i in range(0, 16):
            str += chars[random.randint(0, len(chars) - 1):random.randint(0, len(chars) - 1) + 1]
        # original PHP reference:
        # for ($i = 0; $i < $length; $i++) {
        #     $str .= substr($chars, mt_rand(0, strlen($chars) - 1), 1);
        # }
        return str;

    def getJsApiTicket(self):
        # Fetch the jsapi_ticket; with it, JS-SDK signatures can be produced.
        import urllib.request
        # The jsapi_ticket should be cached globally and refreshed on expiry.
        # cookie('ticket',null);
        # Access token first.
        accessToken = self.accesstokens()
        # For enterprise accounts use this URL instead:
        # $url = "https://qyapi.weixin.qq.com/cgi-bin/get_jsapi_ticket?access_token=$accessToken";
        # Then the jsapi_ticket endpoint:
        url = "https://api.weixin.qq.com/cgi-bin/ticket/getticket?access_token={}&type=jsapi".format(accessToken)
        req = request.Request(url)
        res_data = request.urlopen(req)
        res = res_data.read()
        # NOTE(review): this should almost certainly be json.loads(res);
        # json.dumps() serialises the bytes and the ['ticket'] lookup below
        # will fail.  Also 'request' here resolves to flask.request, not
        # urllib.request, so Request/urlopen raise AttributeError.
        res = json.dumps(res)
        return str(res['ticket'])
##公众号回复和验证 -- official-account verification and message replies
@wx.route('/token',methods=['GET','POST'])
def token():
    """Official-account server-verification / message-reply endpoint.

    NOTE(review): the unconditional 'return echostr' a few lines in makes
    everything after it dead code -- apparently a debugging shortcut that
    bypasses WeChat's signature verification.  The original GET-verification
    and POST message-reply logic is preserved below, unreachable.
    """
    token = '<PASSWORD>'
    echostr = request.args.get('echostr')
    # Echo the challenge straight back (signature check bypassed!).
    return echostr
    #return "8337549056785503916"
    # ---------------- unreachable from here down ----------------
    print("请求进来了")
    import hashlib
    if request.method == 'GET':
        try:
            signature = request.args.get('signature')
            print(signature)
            timestamp = request.args.get('timestamp')
            print(timestamp)
            echostr = request.args.get('echostr')
            nonce = request.args.get('nonce')
            # WeChat GET verification: sha1 over sorted token/timestamp/nonce.
            list = [token, timestamp, nonce]
            list.sort()
            sha1 = hashlib.sha1()
            # NOTE(review): map() is lazy in Python 3, so sha1.update is never
            # actually called here; the strings would also need .encode().
            map(sha1.update, list)
            hashcode = sha1.hexdigest()
            print("handle/GET func: hashcode, signature: ", hashcode, signature)
            if hashcode == signature:
                print("成功")
                return echostr
            else:
                return ""
        except Exception:
            print("异常")
            return ""
    print("1")
    signature = request.args.get('signature')
    print(signature)
    timestamp = request.args.get('timestamp')
    print(timestamp)
    echostr = request.args.get('echostr')
    nonce = request.args.get('nonce')
    print(nonce)
    openid = request.args.get('openid')
    # NOTE(review): the session is keyed by the openid value itself.
    session[request.args.get('openid')] = openid
    print("open获取")
    print(openid)
    if len(request.args)==0:
        return "hello, this is handle view"
    # Second, string-concatenation variant of the signature check.
    list = [token, timestamp, nonce]
    list.sort()
    s = list[0]+list[1]+list[2]
    hashcode = hashlib.sha1(s.encode('utf-8')).hexdigest()
    if hashcode == signature:
        print("请求结束1")
        print(echostr)
        return str(echostr)
    else:
        print('验证失败')
        return "hi"
    print("请求结束")
    ##消息回复 -- message replies
    if request.method=="POST":
        print("open获取Post")
        openid = request.args.get('openid');
        session['openid'] = openid
        print(openid)
        # The WeChat server forwards the user's message as an XML body.
        xml_str = request.data
        if not xml_str:
            return ""
        # Parse the XML payload.
        xml_dict = xmltodict.parse(xml_str)
        xml_dict = xml_dict.get("xml")
        # Message type.
        msg_type = xml_dict.get("MsgType")
        if msg_type == "text":
            # Text message: echo it back to the user via the WeChat server.
            resp_dict = {
                "xml": {
                    "ToUserName": xml_dict.get("FromUserName"),
                    "FromUserName": xml_dict.get("ToUserName"),
                    "CreateTime": int(time.time()),
                    "MsgType": "text",
                    "Content": "you say:" + xml_dict.get("Content")
                }
            }
            # Serialise the dict back to XML.
            resp_xml_str = xmltodict.unparse(resp_dict)
            # Hand the reply back to the WeChat server.
            return resp_xml_str
        else:
            # Any non-text message gets a fixed reply.
            resp_dict = {
                "xml": {
                    "ToUserName": xml_dict.get("FromUserName"),
                    "FromUserName": xml_dict.get("ToUserName"),
                    "CreateTime": int(time.time()),
                    "MsgType": "text",
                    "Content": "Dear I Love you so much"
                }
            }
            resp_xml_str = xmltodict.unparse(resp_dict)
            # Hand the reply back to the WeChat server.
            return resp_xml_str
# print("2") | zh | 0.427783 | # -*- coding: utf-8 -*- # t=WeChatClient.jsapi.get_ticket() # signature=WeChatClient.jsapi.get_jsapi_signature() # signature=wxjspay.get_jsapi_params(openid,price=1.1,orderNum='1203333333')['sign'] #/pages/product/product?id=6' ##http://127.0.0.1:5000/wx/mpPay?openid=oo0m04oxLhANWFBwMWezYRRRidzc # wx=WX_PayToolUtil(APP_ID=setting.WeinXin.MP_APP_ID,MCH_ID=setting.WeinXin.MCH_ID,API_KEY=setting.WeinXin.API_KEY,NOTIFY_URL=setting.WeinXin.NOTIFY_URL) # res=wx.getPayUrl(orderid=IOUtil.orderNo(),openid=openid,goodsPrice=1) # pay = WeChatPay(appid=setting.WeinXin.MP_APP_ID, api_key=setting.WeinXin.API_KEY,sub_appid=setting.WeinXin.MP_APP_ID, mch_id=setting.WeinXin.MCH_ID) # res = pay.order.create( # trade_type="JSAPI", # body="商品描述", # total_fee=1, # notify_url=setting.WeinXin.NOTIFY_URL, # user_id=openid, # out_trade_no=IOUtil.orderNo()) 微信支付工具 # 小程序ID # # 商户号 # 接口链接 # 异步通知 生成签名 # 参数排序 # 拼接商户KEY # md5加密,也可以用其他方式 向微信支付端发出请求,获取url # 生成随机字符串,小于32位 # 小程序ID # 商户号 # 随机字符串 # 支付说明 # 生成的订单号 # 标价金额 # 小程序不能获取客户ip,web用socekt实现 # 支付类型 # 用户id # 生成签名 # python3一种写法 # xml 2 dict # 4. 获取prepay_id # 时间戳 # 5. 五个参数 # 6. paySign签名 # 加入签名 # 7. 传给前端的签名后的参数 知识中心
:return: # 获得jsapi_ticket # 注意 URL 一定要动态获取,不能 hardcode. # protocol = (!empty($_SERVER['HTTPS']) && $_SERVER['HTTPS'] !== 'off' || $_SERVER['SERVER_PORT'] == 443) ? "https://" : "http://"; # $url = "$protocol$_SERVER[HTTP_HOST]$_SERVER[REQUEST_URI]"; # 获取当前页面的url # 获取timestamp(时间戳) # 获取noncestr(随机字符串) # 这里参数的顺序要按照 key 值 ASCII 码升序排序 # 得到signature # 获取noncestr(随机字符串) # for ($i = 0; $i < $length; $i++) { # $str .= substr($chars, mt_rand(0, strlen($chars) - 1), 1); # } # 获得jsapi_ticket # 获得jsapi_ticket之后,就可以生成JS-SDK权限验证的签名了 # jsapi_ticket 应该全局存储与更新,以下代码以写入到文件中做示例 # cookie('ticket',null); # 获取access_token # 如果是企业号用以下 URL 获取 ticket # $url = "https://qyapi.weixin.qq.com/cgi-bin/get_jsapi_ticket?access_token=$accessToken"; # 获取jsapi_ticket ##公众号回复和验证 #return "8337549056785503916" ##消息回复 # 表示微信服务器转发消息过来 # 对xml字符串进行解析 # 提取消息类型 # 表示发送的是文本消息 # 构造返回值,经由微信服务器回复给用户的消息内容 # 将字典转换为xml字符串 # 返回消息数据给微信服务器 # 返回消息数据给微信服务器 # print("2") | 1.882308 | 2 |
virtool/users/sessions.py | ReeceHoffmann/virtool | 39 | 6619099 | import hashlib
import secrets
from typing import Optional, Tuple
import arrow
import virtool.db.utils
import virtool.utils
from virtool.db.core import DB
async def create_session(
    db, ip: str, user_id: Optional[str] = None, remember: Optional[bool] = False
) -> Tuple[dict, str]:
    """
    Create and store a new session document.

    Anonymous sessions last 10 minutes; authenticated ones 60 minutes, or 30
    days when `remember` is set.

    :param db: the application database client
    :param ip: the client IP to record on the session
    :param user_id: the id of the authenticated user, if any
    :param remember: extend the authenticated session lifetime to 30 days
    :return: the inserted session document and, for authenticated sessions,
             the raw (unhashed) token
    """
    new_id = await create_session_id(db)

    now = arrow.utcnow()

    if user_id:
        lifetime = {"days": 30} if remember else {"minutes": 60}
    else:
        lifetime = {"minutes": 10}

    new_session = {
        "_id": new_id,
        "created_at": virtool.utils.timestamp(),
        "expiresAt": now.shift(**lifetime).datetime,
        "ip": ip,
    }

    raw_token = None

    if user_id:
        raw_token, hashed = virtool.utils.generate_key()
        user = await db.users.find_one(user_id)

        new_session.update(
            {
                "token": hashed,
                "administrator": user["administrator"],
                "groups": user["groups"],
                "permissions": user["permissions"],
                "force_reset": user["force_reset"],
                "user": {"id": user_id},
            }
        )

    await db.sessions.insert_one(new_session)

    return new_session, raw_token
async def create_session_id(db: virtool.db.core.DB) -> str:
    """
    Create a new unique session id.

    Draws random candidates until one is not already present in the sessions
    collection.

    :param db: the application database client
    :return: a session id
    """
    while True:
        candidate = secrets.token_hex(32)

        if not await db.sessions.count_documents({"_id": candidate}):
            return candidate
async def get_session(
    db: DB, session_id: str, session_token: str
) -> Tuple[Optional[dict], Optional[str]]:
    """
    Get a session and token by its id and token.

    If the passed `session_token` is `None`, an unauthenticated session document matching the
    `session_id` will be returned. If the matching session is authenticated and a `None` token is
    passed, `(None, None)` will be returned.

    Will return `(None, None)` if the session doesn't exist or the session id and token do not go
    together.

    :param db: the application database client
    :param session_id: the session id
    :param session_token: the token for the session
    :return: a session document and its raw token (or Nones)
    """
    document = await db.sessions.find_one({"_id": session_id})

    if document is None:
        return None, None

    try:
        document_token = document["token"]
    except KeyError:
        # Unauthenticated session: no token stored on the document.
        return document, None

    if session_token is None:
        return None, None

    # Only the SHA-256 hash of the token is stored; hash the supplied token
    # before comparing.
    hashed_token = hashlib.sha256(session_token.encode()).hexdigest()

    if document_token == hashed_token:
        return document, session_token

    # BUG FIX: previously fell through and implicitly returned None, which
    # broke callers unpacking the declared (document, token) tuple.
    return None, None
async def create_reset_code(
db, session_id: str, user_id: str, remember: Optional[bool] = False
) -> int:
"""
Create a secret code that is used to verify a password reset request. Properties:
- the reset request must pass a reset code that is associated with the session linked to the
request
- the reset code is dropped from the session for any non-reset request sent after the code was
generated
:param db:
:param session_id:
:param user_id:
:param remember:
:return:
"""
reset_code = secrets.token_hex(32)
await db.sessions.update_one(
{"_id": session_id},
{
"$set": {
"reset_code": reset_code,
"reset_remember": remember,
"reset_user_id": user_id,
}
},
)
return reset_code
async def clear_reset_code(db: virtool.db.core.DB, session_id: str):
"""
Clear the reset information attached to the session associated with the passed `session_id`.
:param db: the application database client
:param session_id: the session id
"""
await db.sessions.update_one(
{"_id": session_id},
{"$unset": {"reset_code": "", "reset_remember": "", "reset_user_id": ""}},
)
async def replace_session(
db: virtool.db.core.DB,
session_id: str,
ip: str,
user_id: Optional[str] = None,
remember: Optional[bool] = False,
) -> Tuple[dict, str]:
"""
Replace the session associated with `session_id` with a new one. Return the new session
document.
Supplying a `user_id` indicates the session is authenticated. Setting `remember` will make the
session last for 30 days instead of the default 30 minutes.
:param db: the application database client
:param session_id: the id of the session to replace
:param ip:
:param user_id:
:param remember:
:return: new session document and token
"""
await db.sessions.delete_one({"_id": session_id})
return await create_session(db, ip, user_id, remember=remember)
| import hashlib
import secrets
from typing import Optional, Tuple
import arrow
import virtool.db.utils
import virtool.utils
from virtool.db.core import DB
async def create_session(
db, ip: str, user_id: Optional[str] = None, remember: Optional[bool] = False
) -> Tuple[dict, str]:
session_id = await create_session_id(db)
utc = arrow.utcnow()
if user_id and remember:
expires_at = utc.shift(days=30)
elif user_id:
expires_at = utc.shift(minutes=60)
else:
expires_at = utc.shift(minutes=10)
session = {
"_id": session_id,
"created_at": virtool.utils.timestamp(),
"expiresAt": expires_at.datetime,
"ip": ip,
}
token = None
if user_id:
token, hashed = virtool.utils.generate_key()
user_document = await db.users.find_one(user_id)
session.update(
{
"token": hashed,
"administrator": user_document["administrator"],
"groups": user_document["groups"],
"permissions": user_document["permissions"],
"force_reset": user_document["force_reset"],
"user": {"id": user_id},
}
)
await db.sessions.insert_one(session)
return session, token
async def create_session_id(db: virtool.db.core.DB) -> str:
"""
Create a new unique session id.
:param db: the application database client
:return: a session id
"""
session_id = secrets.token_hex(32)
if await db.sessions.count_documents({"_id": session_id}):
return await create_session_id(db)
return session_id
async def get_session(
db: DB, session_id: str, session_token: str
) -> Tuple[Optional[dict], Optional[str]]:
"""
Get a session and token by its id and token.
If the passed `session_token` is `None`, an unauthenticated session document matching the
`session_id` will be returned. If the matching session is authenticated and token is passed,
`None` will be returned.
Will return `None` if the session doesn't exist or the session id and token do not go together.
:param db: the application database client
:param session_id: the session id
:param session_token: the token for the session
:return: a session document
"""
document = await db.sessions.find_one({"_id": session_id})
if document is None:
return None, None
try:
document_token = document["token"]
except KeyError:
return document, None
if session_token is None:
return None, None
hashed_token = hashlib.sha256(session_token.encode()).hexdigest()
if document_token == hashed_token:
return document, session_token
async def create_reset_code(
db, session_id: str, user_id: str, remember: Optional[bool] = False
) -> int:
"""
Create a secret code that is used to verify a password reset request. Properties:
- the reset request must pass a reset code that is associated with the session linked to the
request
- the reset code is dropped from the session for any non-reset request sent after the code was
generated
:param db:
:param session_id:
:param user_id:
:param remember:
:return:
"""
reset_code = secrets.token_hex(32)
await db.sessions.update_one(
{"_id": session_id},
{
"$set": {
"reset_code": reset_code,
"reset_remember": remember,
"reset_user_id": user_id,
}
},
)
return reset_code
async def clear_reset_code(db: virtool.db.core.DB, session_id: str):
"""
Clear the reset information attached to the session associated with the passed `session_id`.
:param db: the application database client
:param session_id: the session id
"""
await db.sessions.update_one(
{"_id": session_id},
{"$unset": {"reset_code": "", "reset_remember": "", "reset_user_id": ""}},
)
async def replace_session(
db: virtool.db.core.DB,
session_id: str,
ip: str,
user_id: Optional[str] = None,
remember: Optional[bool] = False,
) -> Tuple[dict, str]:
"""
Replace the session associated with `session_id` with a new one. Return the new session
document.
Supplying a `user_id` indicates the session is authenticated. Setting `remember` will make the
session last for 30 days instead of the default 30 minutes.
:param db: the application database client
:param session_id: the id of the session to replace
:param ip:
:param user_id:
:param remember:
:return: new session document and token
"""
await db.sessions.delete_one({"_id": session_id})
return await create_session(db, ip, user_id, remember=remember)
| en | 0.751096 | Create a new unique session id. :param db: the application database client :return: a session id Get a session and token by its id and token. If the passed `session_token` is `None`, an unauthenticated session document matching the `session_id` will be returned. If the matching session is authenticated and token is passed, `None` will be returned. Will return `None` if the session doesn't exist or the session id and token do not go together. :param db: the application database client :param session_id: the session id :param session_token: the token for the session :return: a session document Create a secret code that is used to verify a password reset request. Properties: - the reset request must pass a reset code that is associated with the session linked to the request - the reset code is dropped from the session for any non-reset request sent after the code was generated :param db: :param session_id: :param user_id: :param remember: :return: Clear the reset information attached to the session associated with the passed `session_id`. :param db: the application database client :param session_id: the session id Replace the session associated with `session_id` with a new one. Return the new session document. Supplying a `user_id` indicates the session is authenticated. Setting `remember` will make the session last for 30 days instead of the default 30 minutes. :param db: the application database client :param session_id: the id of the session to replace :param ip: :param user_id: :param remember: :return: new session document and token | 2.162366 | 2 |
python3/20.valid-parentheses.210968330.ac.py | Diego-Zulu/leetcode_answers | 0 | 6619100 | #
# @lc app=leetcode id=20 lang=python3
#
# [20] Valid Parentheses
#
# https://leetcode.com/problems/valid-parentheses/description/
#
# algorithms
# Easy (38.45%)
# Likes: 4630
# Dislikes: 210
# Total Accepted: 947.4K
# Total Submissions: 2.5M
# Testcase Example: '"()"'
#
# Given a string containing just the characters '(', ')', '{', '}', '[' and
# ']', determine if the input string is valid.
#
# An input string is valid if:
#
#
# Open brackets must be closed by the same type of brackets.
# Open brackets must be closed in the correct order.
#
#
# Note that an empty string is also considered valid.
#
# Example 1:
#
#
# Input: "()"
# Output: true
#
#
# Example 2:
#
#
# Input: "()[]{}"
# Output: true
#
#
# Example 3:
#
#
# Input: "(]"
# Output: false
#
#
# Example 4:
#
#
# Input: "([)]"
# Output: false
#
#
# Example 5:
#
#
# Input: "{[]}"
# Output: true
#
#
#
# @lc code=start
open_equiv = {
']': '[',
'}': '{',
')': '(',
'-': '-',
}
class Solution:
def isValid(self, s: str) -> bool:
stack = []
for c in s:
if c in "([{":
stack.append(c)
else:
last = stack.pop() if stack else '-'
if open_equiv[c] != last:
return False
return not stack
# @lc code=end
| #
# @lc app=leetcode id=20 lang=python3
#
# [20] Valid Parentheses
#
# https://leetcode.com/problems/valid-parentheses/description/
#
# algorithms
# Easy (38.45%)
# Likes: 4630
# Dislikes: 210
# Total Accepted: 947.4K
# Total Submissions: 2.5M
# Testcase Example: '"()"'
#
# Given a string containing just the characters '(', ')', '{', '}', '[' and
# ']', determine if the input string is valid.
#
# An input string is valid if:
#
#
# Open brackets must be closed by the same type of brackets.
# Open brackets must be closed in the correct order.
#
#
# Note that an empty string is also considered valid.
#
# Example 1:
#
#
# Input: "()"
# Output: true
#
#
# Example 2:
#
#
# Input: "()[]{}"
# Output: true
#
#
# Example 3:
#
#
# Input: "(]"
# Output: false
#
#
# Example 4:
#
#
# Input: "([)]"
# Output: false
#
#
# Example 5:
#
#
# Input: "{[]}"
# Output: true
#
#
#
# @lc code=start
open_equiv = {
']': '[',
'}': '{',
')': '(',
'-': '-',
}
class Solution:
def isValid(self, s: str) -> bool:
stack = []
for c in s:
if c in "([{":
stack.append(c)
else:
last = stack.pop() if stack else '-'
if open_equiv[c] != last:
return False
return not stack
# @lc code=end
| en | 0.567167 | # # @lc app=leetcode id=20 lang=python3 # # [20] Valid Parentheses # # https://leetcode.com/problems/valid-parentheses/description/ # # algorithms # Easy (38.45%) # Likes: 4630 # Dislikes: 210 # Total Accepted: 947.4K # Total Submissions: 2.5M # Testcase Example: '"()"' # # Given a string containing just the characters '(', ')', '{', '}', '[' and # ']', determine if the input string is valid. # # An input string is valid if: # # # Open brackets must be closed by the same type of brackets. # Open brackets must be closed in the correct order. # # # Note that an empty string is also considered valid. # # Example 1: # # # Input: "()" # Output: true # # # Example 2: # # # Input: "()[]{}" # Output: true # # # Example 3: # # # Input: "(]" # Output: false # # # Example 4: # # # Input: "([)]" # Output: false # # # Example 5: # # # Input: "{[]}" # Output: true # # # # @lc code=start # @lc code=end | 3.586318 | 4 |
migrations/versions/90a07ecb7dc7_adding_is_parent_session_and_child_id_.py | yaelmi3/backslash | 17 | 6619101 | <reponame>yaelmi3/backslash
"""Adding is_parent_session and child_id to session model
Revision ID: 90a07ecb7dc7
Revises: 9c5e1cfd0e8a
Create Date: 2017-06-08 10:31:13.121197
"""
# revision identifiers, used by Alembic.
revision = '<KEY>'
down_revision = '9c5e1cfd0e8a'
from alembic import op
import sqlalchemy as sa
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('session', sa.Column('child_id', sa.String(length=20), nullable=True))
op.add_column('session', sa.Column('is_parent_session', sa.Boolean(), server_default='FALSE', nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('session', 'is_parent_session')
op.drop_column('session', 'child_id')
# ### end Alembic commands ###
| """Adding is_parent_session and child_id to session model
Revision ID: 90a07ecb7dc7
Revises: 9c5e1cfd0e8a
Create Date: 2017-06-08 10:31:13.121197
"""
# revision identifiers, used by Alembic.
revision = '<KEY>'
down_revision = '9c5e1cfd0e8a'
from alembic import op
import sqlalchemy as sa
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('session', sa.Column('child_id', sa.String(length=20), nullable=True))
op.add_column('session', sa.Column('is_parent_session', sa.Boolean(), server_default='FALSE', nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('session', 'is_parent_session')
op.drop_column('session', 'child_id')
# ### end Alembic commands ### | en | 0.524697 | Adding is_parent_session and child_id to session model Revision ID: 90a07ecb7dc7 Revises: 9c5e1cfd0e8a Create Date: 2017-06-08 10:31:13.121197 # revision identifiers, used by Alembic. # ### commands auto generated by Alembic - please adjust! ### # ### end Alembic commands ### # ### commands auto generated by Alembic - please adjust! ### # ### end Alembic commands ### | 1.393542 | 1 |
day_3/python_formatting.py | Ishaan-99-cyber/ml-workshop-wac-1 | 6 | 6619102 | <reponame>Ishaan-99-cyber/ml-workshop-wac-1
a = 10
print(f'hello {a + 10} his is normal string {[1, 2, 3, 4]} this is normal {10// 4}')
| a = 10
print(f'hello {a + 10} his is normal string {[1, 2, 3, 4]} this is normal {10// 4}') | none | 1 | 3.249446 | 3 | |
ITP449_HW02_XU_YANYU/ITP449_HW02_Q3_Xu_Yanyu.py | chloexu310/ITP | 0 | 6619103 | #<NAME>
#ITP_449, Spring 2020
#HW02
#Question 3
import re
def main():
ask = input("Please enter your password:")
while True:
if (len(ask) < 8):
print(":( Try Again")
ask = input("Please enter your password:")
elif not re.search("[a-z]", ask):
print(":( Try Again")
ask = input("Please enter your password:")
elif not re.search("[A-Z]", ask):
print(":( Try Again")
ask = input("Please enter your password:")
elif not re.search("[0-9]", ask):
print(":( Try Again")
ask = input("Please enter your password:")
elif not re.search("[-!@#$]", ask):
print(":( Try Again")
ask = input("Please enter your password:")
elif re.search("\s", ask):
print(":( Try Again")
ask = input("Please enter your password:")
else:
print("Access Granted")
break
main() | #<NAME>
#ITP_449, Spring 2020
#HW02
#Question 3
import re
def main():
ask = input("Please enter your password:")
while True:
if (len(ask) < 8):
print(":( Try Again")
ask = input("Please enter your password:")
elif not re.search("[a-z]", ask):
print(":( Try Again")
ask = input("Please enter your password:")
elif not re.search("[A-Z]", ask):
print(":( Try Again")
ask = input("Please enter your password:")
elif not re.search("[0-9]", ask):
print(":( Try Again")
ask = input("Please enter your password:")
elif not re.search("[-!@#$]", ask):
print(":( Try Again")
ask = input("Please enter your password:")
elif re.search("\s", ask):
print(":( Try Again")
ask = input("Please enter your password:")
else:
print("Access Granted")
break
main() | en | 0.345043 | #<NAME> #ITP_449, Spring 2020 #HW02 #Question 3 #$]", ask): | 4.051975 | 4 |
python/decoi/config.py | i-makinori/scrap_yard | 0 | 6619104 | <reponame>i-makinori/scrap_yard<filename>python/decoi/config.py
import copy
### config for 153xx
people_num = 35
row_length = 6
col_length = 6
seat_seed = []
for row in range(row_length):
for col in range(col_length):
seat_seed = seat_seed + [(row, col)]
seat_seed.remove((5,0))
seat_seed = seat_seed
## cheat of class members
def cheat_dict(seat_to, seat_from):
return {"seat_to":copy.copy(seat_to), "seat_from":copy.copy(seat_from)}
front = [(0,1),(0,2),(0,3),(0,4)]
fronter = [(1,0)]
side = [(2,0),(3,0),(2,5),(3,5)]
sider = [(1,5)]
#candidate
cheat_list = [
cheat_dict(front, fronter),
cheat_dict(side, sider),
]
| import copy
### config for 153xx
people_num = 35
row_length = 6
col_length = 6
seat_seed = []
for row in range(row_length):
for col in range(col_length):
seat_seed = seat_seed + [(row, col)]
seat_seed.remove((5,0))
seat_seed = seat_seed
## cheat of class members
def cheat_dict(seat_to, seat_from):
return {"seat_to":copy.copy(seat_to), "seat_from":copy.copy(seat_from)}
front = [(0,1),(0,2),(0,3),(0,4)]
fronter = [(1,0)]
side = [(2,0),(3,0),(2,5),(3,5)]
sider = [(1,5)]
#candidate
cheat_list = [
cheat_dict(front, fronter),
cheat_dict(side, sider),
] | en | 0.588529 | ### config for 153xx ## cheat of class members #candidate | 2.830965 | 3 |
object and classes lab/zoo.py | DiyanKalaydzhiev23/fundamentals---python | 0 | 6619105 | class Zoo:
__animals = 0
def __init__(self, name):
self.name = name
self.mammals = []
self.fishes = []
self.birds = []
def add_animal(self, species, name):
if species == "mammal":
self.mammals.append(name)
elif species == "fish":
self.fishes.append(name)
elif species == "bird":
self.birds.append(name)
Zoo.__animals += 1
def get_info(self, species):
result = ""
if species == "mammal":
result = f"Mammals in {self.name}: {', '.join(self.mammals)}"
elif species == "fish":
result = f"Fishes in {self.name}: {', '.join(self.fishes)}"
elif species == "bird":
result = f"Birds in {self.name}: {', '.join(self.birds)}"
print(result)
print(f"Total animals: {Zoo.__animals}")
zoo_name = input()
n = int(input())
zoo = Zoo(zoo_name)
for _ in range(n):
type_animal = input().split()
zoo.add_animal(type_animal[0], type_animal[1])
info = input()
zoo.get_info(info)
| class Zoo:
__animals = 0
def __init__(self, name):
self.name = name
self.mammals = []
self.fishes = []
self.birds = []
def add_animal(self, species, name):
if species == "mammal":
self.mammals.append(name)
elif species == "fish":
self.fishes.append(name)
elif species == "bird":
self.birds.append(name)
Zoo.__animals += 1
def get_info(self, species):
result = ""
if species == "mammal":
result = f"Mammals in {self.name}: {', '.join(self.mammals)}"
elif species == "fish":
result = f"Fishes in {self.name}: {', '.join(self.fishes)}"
elif species == "bird":
result = f"Birds in {self.name}: {', '.join(self.birds)}"
print(result)
print(f"Total animals: {Zoo.__animals}")
zoo_name = input()
n = int(input())
zoo = Zoo(zoo_name)
for _ in range(n):
type_animal = input().split()
zoo.add_animal(type_animal[0], type_animal[1])
info = input()
zoo.get_info(info)
| none | 1 | 3.75459 | 4 | |
gen3_etl/utils/schema.py | ohsu-comp-bio/gen3-etl | 1 | 6619106 | <filename>gen3_etl/utils/schema.py<gh_stars>1-10
import os
from yaml import dump, load, FullLoader
from genson import SchemaBuilder
from gen3_etl.utils.ioutils import reader
from gen3_etl.utils.defaults import camel_case
def generate_schema(items):
"""Creates json schema based on items."""
builder = SchemaBuilder()
builder.add_schema({"type": "object", "properties": {}})
for item in items:
builder.add_object(item)
return builder.to_schema()
def sample(item_paths, limit=100):
"""Reads limit number of records from each file in paths."""
for path in item_paths:
i = 0
for line in reader(path):
if i < limit:
yield line
i = i + 1
else:
break
def to_schema(item_paths):
"""Samples path and deduces schema."""
return generate_schema(sample(item_paths))
def template(type_name):
"""Parses template for type_name."""
try:
with open(os.path.join('schemas', '{}.yaml'.format(type_name))) as ins:
return load(ins, Loader=FullLoader)
except Exception as e:
with open(os.path.join('schemas', '{}.yaml'.format('default'))) as ins:
return load(ins, Loader=FullLoader)
def generate(item_paths, type_name=None, output_dir=None, schema_path=None, callback=None, links=[]):
"""
Create schema files.
Defaults for [type_name,schema_path,output_dir] derived from item_paths.
Supply your pre_processor to modify schema.
Returns schema path.
"""
# default type name to basename of first file
if not type_name:
for item_path in item_paths:
type_name = os.path.basename(item_path).split('.')[0]
break
# default output_dir to dirname of first file
if not output_dir:
for item_path in item_paths:
output_dir = os.path.dirname(item_path)
break
# default schema_path to output_dir + type_name
if not schema_path:
schema_path = os.path.join(output_dir, '{}.yaml'.format(type_name))
schema = template(type_name)
imported_properties = to_schema(item_paths)['properties']
# default to string ['null', 'string'] if only null or string defined
for k in imported_properties:
p = imported_properties[k]
if p.get('type') == 'null':
p['type'] = ['null', 'string']
if p.get('type') == 'string':
p['type'] = ['null', 'string']
if p.get('type') == 'integer':
p['type'] = ['null', 'integer']
schema['properties'].update(imported_properties)
schema['title'] = camel_case(type_name)
schema['description'] = 'autogenerated definitions for {}'.format(schema['title'])
schema['id'] = type_name
schema['links'] = links
if callback:
schema = callback(schema)
# cross check
rp = set(schema['required'])
sp = set(schema['systemProperties'])
p = set(schema['properties'].keys())
assert sp.issubset(p), 'schema has system property(s) not found in properties {}'.format(sp - p)
assert rp.issubset(p), 'schema has required property(s) not found in properties {}'.format(rp - p)
# for k in schema['properties']:
# p = schema['properties'][k]
# if '$ref' in p:
# continue
# assert "'''null'''" not in p['type'], '{} contains {}'.format(k, "'''null'''")
# print(k, p['type'])
with open(schema_path, 'wt') as ins:
ins.write(dump(schema, default_flow_style=False))
return schema_path
| <filename>gen3_etl/utils/schema.py<gh_stars>1-10
import os
from yaml import dump, load, FullLoader
from genson import SchemaBuilder
from gen3_etl.utils.ioutils import reader
from gen3_etl.utils.defaults import camel_case
def generate_schema(items):
"""Creates json schema based on items."""
builder = SchemaBuilder()
builder.add_schema({"type": "object", "properties": {}})
for item in items:
builder.add_object(item)
return builder.to_schema()
def sample(item_paths, limit=100):
"""Reads limit number of records from each file in paths."""
for path in item_paths:
i = 0
for line in reader(path):
if i < limit:
yield line
i = i + 1
else:
break
def to_schema(item_paths):
"""Samples path and deduces schema."""
return generate_schema(sample(item_paths))
def template(type_name):
"""Parses template for type_name."""
try:
with open(os.path.join('schemas', '{}.yaml'.format(type_name))) as ins:
return load(ins, Loader=FullLoader)
except Exception as e:
with open(os.path.join('schemas', '{}.yaml'.format('default'))) as ins:
return load(ins, Loader=FullLoader)
def generate(item_paths, type_name=None, output_dir=None, schema_path=None, callback=None, links=[]):
"""
Create schema files.
Defaults for [type_name,schema_path,output_dir] derived from item_paths.
Supply your pre_processor to modify schema.
Returns schema path.
"""
# default type name to basename of first file
if not type_name:
for item_path in item_paths:
type_name = os.path.basename(item_path).split('.')[0]
break
# default output_dir to dirname of first file
if not output_dir:
for item_path in item_paths:
output_dir = os.path.dirname(item_path)
break
# default schema_path to output_dir + type_name
if not schema_path:
schema_path = os.path.join(output_dir, '{}.yaml'.format(type_name))
schema = template(type_name)
imported_properties = to_schema(item_paths)['properties']
# default to string ['null', 'string'] if only null or string defined
for k in imported_properties:
p = imported_properties[k]
if p.get('type') == 'null':
p['type'] = ['null', 'string']
if p.get('type') == 'string':
p['type'] = ['null', 'string']
if p.get('type') == 'integer':
p['type'] = ['null', 'integer']
schema['properties'].update(imported_properties)
schema['title'] = camel_case(type_name)
schema['description'] = 'autogenerated definitions for {}'.format(schema['title'])
schema['id'] = type_name
schema['links'] = links
if callback:
schema = callback(schema)
# cross check
rp = set(schema['required'])
sp = set(schema['systemProperties'])
p = set(schema['properties'].keys())
assert sp.issubset(p), 'schema has system property(s) not found in properties {}'.format(sp - p)
assert rp.issubset(p), 'schema has required property(s) not found in properties {}'.format(rp - p)
# for k in schema['properties']:
# p = schema['properties'][k]
# if '$ref' in p:
# continue
# assert "'''null'''" not in p['type'], '{} contains {}'.format(k, "'''null'''")
# print(k, p['type'])
with open(schema_path, 'wt') as ins:
ins.write(dump(schema, default_flow_style=False))
return schema_path
| en | 0.510887 | Creates json schema based on items. Reads limit number of records from each file in paths. Samples path and deduces schema. Parses template for type_name. Create schema files. Defaults for [type_name,schema_path,output_dir] derived from item_paths. Supply your pre_processor to modify schema. Returns schema path. # default type name to basename of first file # default output_dir to dirname of first file # default schema_path to output_dir + type_name # default to string ['null', 'string'] if only null or string defined # cross check # for k in schema['properties']: # p = schema['properties'][k] # if '$ref' in p: # continue # assert "'''null'''" not in p['type'], '{} contains {}'.format(k, "'''null'''") # print(k, p['type']) | 2.359723 | 2 |
src/kaiowa/core/criteria.py | revensky/kaiowa | 0 | 6619107 | <filename>src/kaiowa/core/criteria.py
from __future__ import annotations
import abc
from typing import Any, Sequence, Union, TYPE_CHECKING
from kaiowa.core.utils import quote
if TYPE_CHECKING:
from kaiowa.core.selectables import Term
# Anything accepted as a filter operand: a Criterion node or a selectable Term.
Filterable = Union["Criterion", "Term"]
class Criterion(abc.ABC):
    """
    Representation of an abstract criterion of the SQL Query.

    It usually represents the operation filters presented in the :meth:`where` method.

    The comparison, logical and arithmetic operators are overloaded to *build*
    composite criteria (e.g. ``a == b`` yields an :class:`Equal` node) rather
    than to evaluate to booleans.
    """

    # Overloading __eq__ would implicitly set __hash__ to None, making every
    # criterion unhashable (unusable in sets / as dict keys). Restore the
    # default identity hash explicitly.
    __hash__ = object.__hash__

    @abc.abstractmethod
    def __str__(self) -> str:
        """
        Returns the formatted SQL statement of the criterion.

        :return: Criterion's SQL statement.
        :rtype: str
        """

    def __eq__(self, other: Filterable) -> Equal:
        return Equal(self, other)

    def __ne__(self, other: Filterable) -> NotEqual:
        return NotEqual(self, other)

    def __lt__(self, other: Filterable) -> LessThan:
        return LessThan(self, other)

    def __le__(self, other: Filterable) -> LessEqual:
        return LessEqual(self, other)

    def __gt__(self, other: Filterable) -> GreaterThan:
        return GreaterThan(self, other)

    def __ge__(self, other: Filterable) -> GreaterEqual:
        return GreaterEqual(self, other)

    def __and__(self, other: Filterable) -> And:
        return And(self, other)

    def __or__(self, other: Filterable) -> Or:
        return Or(self, other)

    def __invert__(self) -> Not:
        return Not(self)

    def __neg__(self) -> Negative:
        return Negative(self)

    def __pos__(self) -> Criterion:
        # Unary plus is a no-op on a criterion.
        return self

    def __add__(self, other: Filterable) -> Addition:
        return Addition(self, other)

    def __sub__(self, other: Filterable) -> Subtraction:
        return Subtraction(self, other)

    def __mul__(self, other: Filterable) -> Multiplication:
        return Multiplication(self, other)

    def __truediv__(self, other: Filterable) -> Division:
        return Division(self, other)

    # Reflected variants place the foreign operand on the left-hand side.
    def __radd__(self, other: Filterable) -> Addition:
        return Addition(other, self)

    def __rsub__(self, other: Filterable) -> Subtraction:
        return Subtraction(other, self)

    def __rmul__(self, other: Filterable) -> Multiplication:
        return Multiplication(other, self)

    def __rtruediv__(self, other: Filterable) -> Division:
        return Division(other, self)

    def _parse_value(self, value: Filterable) -> Filterable:
        """Wrap bare Python literals in a :class:`Constant`; pass anything else through.

        (Return annotation fixed: the original declared ``-> str`` but the
        method returns a Constant or the untouched value.)
        """
        if isinstance(value, (bool, int, float, str)):
            return Constant(value)

        return value
class Precedence(Criterion):
    """Wraps a criterion in parentheses to force evaluation order."""

    def __init__(self, criterion: Criterion) -> None:
        self.criterion = criterion

    def __str__(self) -> str:
        return "({})".format(self.criterion)
class Constant(Criterion):
    """SQL rendering of a bare Python literal (bool, str, int or float)."""

    def __init__(self, value: Union[int, float, bool, str]) -> None:
        # bool must be tested before the generic fallback: bool is an int subclass.
        if isinstance(value, bool):
            self.value = str(value).upper()
            return
        if isinstance(value, str):
            # NOTE(review): falling back to double quotes when the string holds
            # a single quote yields an SQL *identifier* in standard SQL, and a
            # string containing both quote kinds is still unescaped — confirm
            # how `quote` handles escaping.
            self.value = quote(value, '"' if "'" in value else "'")
            return
        self.value = str(value)

    def __str__(self) -> str:
        return self.value
class Unary(Criterion):
    """Base class for criteria built from a single operand (e.g. NOT, negation)."""

    def __init__(self, term: Union[Term, Criterion]) -> None:
        # Operand rendered by the concrete subclass's __str__.
        self.term = term
class Binary(Criterion):
    """Base class for criteria combining a left and a right operand."""

    def __init__(
        self, left: Union[Term, Criterion], right: Union[Term, Criterion]
    ) -> None:
        # Operands rendered by the concrete subclass's __str__.
        self.left = left
        self.right = right
class Equal(Binary):
    """Equality comparison: renders ``left = right``."""

    def __str__(self) -> str:
        # Wrap bare Python literals on the right in a Constant so they render
        # as SQL literals. Bind to a local instead of reassigning self.right:
        # __str__ must not mutate the criterion (the original did).
        right = self._parse_value(self.right)
        return f"{str(self.left)} = {str(right)}"
class NotEqual(Binary):
    """Inequality comparison: renders ``left <> right``."""

    def __str__(self) -> str:
        # Mirror Equal: wrap bare Python literals so e.g. strings are quoted
        # instead of being emitted as raw, invalid SQL.
        return f"{str(self.left)} <> {str(self._parse_value(self.right))}"


class LessThan(Binary):
    """Renders ``left < right``."""

    def __str__(self) -> str:
        return f"{str(self.left)} < {str(self._parse_value(self.right))}"


class LessEqual(Binary):
    """Renders ``left <= right``."""

    def __str__(self) -> str:
        return f"{str(self.left)} <= {str(self._parse_value(self.right))}"


class GreaterThan(Binary):
    """Renders ``left > right``."""

    def __str__(self) -> str:
        return f"{str(self.left)} > {str(self._parse_value(self.right))}"


class GreaterEqual(Binary):
    """Renders ``left >= right``."""

    def __str__(self) -> str:
        return f"{str(self.left)} >= {str(self._parse_value(self.right))}"
class And(Binary):
    """Logical conjunction: renders ``(left) AND (right)``."""

    def __str__(self) -> str:
        return "({}) AND ({})".format(self.left, self.right)


class Or(Binary):
    """Logical disjunction: renders ``(left) OR (right)``."""

    def __str__(self) -> str:
        return "({}) OR ({})".format(self.left, self.right)
class Not(Unary):
    """Logical negation: renders ``NOT (term)``."""

    def __str__(self) -> str:
        return "NOT ({})".format(self.term)


class Negative(Unary):
    """Arithmetic negation: renders ``-term``."""

    def __str__(self) -> str:
        return "-" + str(self.term)
class Addition(Binary):
    """Renders ``(left) + (right)``."""

    def __str__(self) -> str:
        return "({}) + ({})".format(self.left, self.right)


class Subtraction(Binary):
    """Renders ``(left) - (right)``."""

    def __str__(self) -> str:
        return "({}) - ({})".format(self.left, self.right)


class Multiplication(Binary):
    """Renders ``(left) * (right)``."""

    def __str__(self) -> str:
        return "({}) * ({})".format(self.left, self.right)


class Division(Binary):
    """Renders ``(left) / (right)``."""

    def __str__(self) -> str:
        return "({}) / ({})".format(self.left, self.right)
class IsNull(Unary):
    """Renders ``term IS NULL``.

    Subclasses :class:`Unary` instead of re-declaring an identical
    constructor; the ``term`` parameter is unchanged for callers.
    """

    def __str__(self) -> str:
        return f"{str(self.term)} IS NULL"


class IsNotNull(Unary):
    """Renders ``term IS NOT NULL``."""

    def __str__(self) -> str:
        return f"{str(self.term)} IS NOT NULL"
class In(Binary):
    """Membership test: renders ``left IN (v1,v2,...)`` with quoted values.

    Reuses :class:`Binary`'s constructor (parameter names ``left``/``right``
    are unchanged); ``right`` is expected to be a sequence of values.
    """

    def __str__(self) -> str:
        values = [quote(value) for value in self.right]
        return f"{str(self.left)} IN ({','.join(values)})"


class NotIn(Binary):
    """Negated membership test: renders ``left NOT IN (v1,v2,...)``."""

    def __str__(self) -> str:
        values = [quote(value) for value in self.right]
        return f"{str(self.left)} NOT IN ({','.join(values)})"
class Like(Criterion):
    """Case-sensitive pattern match: renders ``term LIKE 'expr'``."""

    def __init__(self, term: Term, expr: str) -> None:
        self.term = term
        self.expr = expr

    def __str__(self) -> str:
        return "{} LIKE {}".format(self.term, quote(self.expr))


class NotLike(Criterion):
    """Negated case-sensitive pattern match: ``term NOT LIKE 'expr'``."""

    def __init__(self, term: Term, expr: str) -> None:
        self.term = term
        self.expr = expr

    def __str__(self) -> str:
        return "{} NOT LIKE {}".format(self.term, quote(self.expr))


class ILike(Criterion):
    """Case-insensitive pattern match: ``term ILIKE 'expr'``."""

    def __init__(self, term: Term, expr: str) -> None:
        self.term = term
        self.expr = expr

    def __str__(self) -> str:
        return "{} ILIKE {}".format(self.term, quote(self.expr))


class NotILike(Criterion):
    """Negated case-insensitive pattern match: ``term NOT ILIKE 'expr'``."""

    def __init__(self, term: Term, expr: str) -> None:
        self.term = term
        self.expr = expr

    def __str__(self) -> str:
        return "{} NOT ILIKE {}".format(self.term, quote(self.expr))
class Between(Criterion):
    """Range test: renders ``term BETWEEN start AND end``."""

    def __init__(self, term: Term, start: Any, end: Any) -> None:
        self.term = term
        self.start = start
        self.end = end

    def __str__(self) -> str:
        # Wrap bare Python literals (consistent with Equal/In) so string and
        # boolean bounds render as SQL constants instead of raw, unquoted text.
        start = self._parse_value(self.start)
        end = self._parse_value(self.end)
        return f"{str(self.term)} BETWEEN {start} AND {end}"


class NotBetween(Criterion):
    """Negated range test: renders ``term NOT BETWEEN start AND end``."""

    def __init__(self, term: Term, start: Any, end: Any) -> None:
        self.term = term
        self.start = start
        self.end = end

    def __str__(self) -> str:
        start = self._parse_value(self.start)
        end = self._parse_value(self.end)
        return f"{str(self.term)} NOT BETWEEN {start} AND {end}"
class Distinct(Binary):
    """Renders ``left IS DISTINCT FROM right``.

    Subclasses :class:`Binary` instead of re-declaring an identical
    constructor; the ``left``/``right`` parameters are unchanged.
    """

    def __str__(self) -> str:
        return f"{str(self.left)} IS DISTINCT FROM {str(self.right)}"


class NotDistinct(Binary):
    """Renders ``left IS NOT DISTINCT FROM right``."""

    def __str__(self) -> str:
        return f"{str(self.left)} IS NOT DISTINCT FROM {str(self.right)}"
class True_(Unary):
    """Renders ``term IS TRUE``."""

    def __str__(self) -> str:
        return str(self.term) + " IS TRUE"


class NotTrue(Unary):
    """Renders ``term IS NOT TRUE``."""

    def __str__(self) -> str:
        return str(self.term) + " IS NOT TRUE"


class False_(Unary):
    """Renders ``term IS FALSE``."""

    def __str__(self) -> str:
        return str(self.term) + " IS FALSE"


class NotFalse(Unary):
    """Renders ``term IS NOT FALSE``."""

    def __str__(self) -> str:
        return str(self.term) + " IS NOT FALSE"


class Unknown(Unary):
    """Renders ``term IS UNKNOWN``."""

    def __str__(self) -> str:
        return str(self.term) + " IS UNKNOWN"


class NotUnknown(Unary):
    """Renders ``term IS NOT UNKNOWN``."""

    def __str__(self) -> str:
        return str(self.term) + " IS NOT UNKNOWN"
| <filename>src/kaiowa/core/criteria.py
from __future__ import annotations
import abc
from typing import Any, Sequence, Union, TYPE_CHECKING
from kaiowa.core.utils import quote
if TYPE_CHECKING:
from kaiowa.core.selectables import Term
Filterable = Union["Criterion", "Term"]
class Criterion(abc.ABC):
"""
Representation of an abstract criterion of the SQL Query.
It usually represents the operation filters presented in the :meth:`where` method.
"""
@abc.abstractmethod
def __str__(self) -> str:
"""
Returns the formatted SQL statement of the criterion.
:return: Criterion's SQL statement.
:rtype: str
"""
def __eq__(self, other: Filterable) -> Equal:
return Equal(self, other)
def __ne__(self, other: Filterable) -> NotEqual:
return NotEqual(self, other)
def __lt__(self, other: Filterable) -> LessThan:
return LessThan(self, other)
def __le__(self, other: Filterable) -> LessEqual:
return LessEqual(self, other)
def __gt__(self, other: Filterable) -> GreaterThan:
return GreaterThan(self, other)
def __ge__(self, other: Filterable) -> GreaterEqual:
return GreaterEqual(self, other)
def __and__(self, other: Filterable) -> And:
return And(self, other)
def __or__(self, other: Filterable) -> Or:
return Or(self, other)
def __invert__(self) -> Not:
return Not(self)
def __neg__(self) -> Negative:
return Negative(self)
def __pos__(self) -> Criterion:
return self
def __add__(self, other: Filterable) -> Addition:
return Addition(self, other)
def __sub__(self, other: Filterable) -> Subtraction:
return Subtraction(self, other)
def __mul__(self, other: Filterable) -> Multiplication:
return Multiplication(self, other)
def __truediv__(self, other: Filterable) -> Division:
return Division(self, other)
def __radd__(self, other: Filterable) -> Addition:
return Addition(other, self)
def __rsub__(self, other: Filterable) -> Subtraction:
return Subtraction(other, self)
def __rmul__(self, other: Filterable) -> Multiplication:
return Multiplication(other, self)
def __rtruediv__(self, other: Filterable) -> Division:
return Division(other, self)
def _parse_value(self, value: Filterable) -> str:
if isinstance(value, (bool, int, float, str)):
return Constant(value)
return value
class Precedence(Criterion):
def __init__(self, criterion: Criterion) -> None:
self.criterion = criterion
def __str__(self) -> str:
return f"({self.criterion})"
class Constant(Criterion):
def __init__(self, value: Union[int, float, bool, str]) -> None:
if isinstance(value, bool):
self.value = str(value).upper()
elif isinstance(value, str):
quote_char = '"' if "'" in value else "'"
self.value = quote(value, quote_char)
else:
self.value = str(value)
def __str__(self) -> str:
return self.value
class Unary(Criterion):
def __init__(self, term: Union[Term, Criterion]) -> None:
self.term = term
class Binary(Criterion):
def __init__(
self, left: Union[Term, Criterion], right: Union[Term, Criterion]
) -> None:
self.left = left
self.right = right
class Equal(Binary):
def __str__(self) -> str:
self.right = self._parse_value(self.right)
return f"{str(self.left)} = {str(self.right)}"
class NotEqual(Binary):
def __str__(self) -> str:
return f"{str(self.left)} <> {str(self.right)}"
class LessThan(Binary):
def __str__(self) -> str:
return f"{str(self.left)} < {str(self.right)}"
class LessEqual(Binary):
def __str__(self) -> str:
return f"{str(self.left)} <= {str(self.right)}"
class GreaterThan(Binary):
def __str__(self) -> str:
return f"{str(self.left)} > {str(self.right)}"
class GreaterEqual(Binary):
def __str__(self) -> str:
return f"{str(self.left)} >= {str(self.right)}"
class And(Binary):
def __str__(self) -> str:
return f"({str(self.left)}) AND ({self.right})"
class Or(Binary):
def __str__(self) -> str:
return f"({str(self.left)}) OR ({str(self.right)})"
class Not(Unary):
def __str__(self) -> str:
return f"NOT ({str(self.term)})"
class Negative(Unary):
def __str__(self) -> str:
return f"-{str(self.term)}"
class Addition(Binary):
def __str__(self) -> str:
return f"({str(self.left)}) + ({str(self.right)})"
class Subtraction(Binary):
def __str__(self) -> str:
return f"({str(self.left)}) - ({str(self.right)})"
class Multiplication(Binary):
def __str__(self) -> str:
return f"({str(self.left)}) * ({str(self.right)})"
class Division(Binary):
def __str__(self) -> str:
return f"({str(self.left)}) / ({str(self.right)})"
class IsNull(Criterion):
def __init__(self, term: Term) -> None:
self.term = term
def __str__(self) -> str:
return f"{str(self.term)} IS NULL"
class IsNotNull(Criterion):
def __init__(self, term: Term) -> None:
self.term = term
def __str__(self) -> str:
return f"{str(self.term)} IS NOT NULL"
class In(Criterion):
def __init__(self, left: Term, right: Sequence[Any]) -> None:
self.left = left
self.right = right
def __str__(self) -> str:
values = [quote(value) for value in self.right]
return f"{str(self.left)} IN ({','.join(values)})"
class NotIn(Criterion):
def __init__(self, left: Term, right: Sequence[Any]) -> None:
self.left = left
self.right = right
def __str__(self) -> str:
values = [quote(value) for value in self.right]
return f"{str(self.left)} NOT IN ({','.join(values)})"
class Like(Criterion):
def __init__(self, term: Term, expr: str) -> None:
self.term = term
self.expr = expr
def __str__(self) -> str:
return f"{str(self.term)} LIKE {quote(self.expr)}"
class NotLike(Criterion):
def __init__(self, term: Term, expr: str) -> None:
self.term = term
self.expr = expr
def __str__(self) -> str:
return f"{str(self.term)} NOT LIKE {quote(self.expr)}"
class ILike(Criterion):
def __init__(self, term: Term, expr: str) -> None:
self.term = term
self.expr = expr
def __str__(self) -> str:
return f"{str(self.term)} ILIKE {quote(self.expr)}"
class NotILike(Criterion):
def __init__(self, term: Term, expr: str) -> None:
self.term = term
self.expr = expr
def __str__(self) -> str:
return f"{str(self.term)} NOT ILIKE {quote(self.expr)}"
class Between(Criterion):
def __init__(self, term: Term, start: Any, end: Any) -> None:
self.term = term
self.start = start
self.end = end
def __str__(self) -> str:
return f"{str(self.term)} BETWEEN {self.start} AND {self.end}"
class NotBetween(Criterion):
def __init__(self, term: Term, start: Any, end: Any) -> None:
self.term = term
self.start = start
self.end = end
def __str__(self) -> str:
return f"{str(self.term)} NOT BETWEEN {self.start} AND {self.end}"
class Distinct(Criterion):
def __init__(self, left: Term, right: Filterable) -> None:
self.left = left
self.right = right
def __str__(self) -> str:
return f"{str(self.left)} IS DISTINCT FROM {str(self.right)}"
class NotDistinct(Criterion):
def __init__(self, left: Term, right: Filterable) -> None:
self.left = left
self.right = right
def __str__(self) -> str:
return f"{str(self.left)} IS NOT DISTINCT FROM {str(self.right)}"
class True_(Unary):
def __str__(self) -> str:
return f"{str(self.term)} IS TRUE"
class NotTrue(Unary):
def __str__(self) -> str:
return f"{str(self.term)} IS NOT TRUE"
class False_(Unary):
def __str__(self) -> str:
return f"{str(self.term)} IS FALSE"
class NotFalse(Unary):
def __str__(self) -> str:
return f"{str(self.term)} IS NOT FALSE"
class Unknown(Unary):
def __str__(self) -> str:
return f"{str(self.term)} IS UNKNOWN"
class NotUnknown(Unary):
def __str__(self) -> str:
return f"{str(self.term)} IS NOT UNKNOWN"
| en | 0.781331 | Representation of an abstract criterion of the SQL Query. It usually represents the operation filters presented in the :meth:`where` method. Returns the formatted SQL statement of the criterion. :return: Criterion's SQL statement. :rtype: str | 2.678596 | 3 |
rl_threat_hunting/filter/import_libs.py | ivukovicRL/threat_hunting_library | 2 | 6619108 | <filename>rl_threat_hunting/filter/import_libs.py
COMMON_IMPORT_LIBS = frozenset([
'a4wapi.dll',
'a4wrv.dll',
'aavm4h.dll',
'ac_as.dll',
'ac_c.dll',
'accesor.dll',
'accom.dll',
'accommonclass.dll',
'accommondialog.dll',
'accore.dll',
'acctrl.dll',
'acdb18.dll',
'acdllenv.dll',
'ace-w64r-20-1.dll',
'ace.dll',
'ace32.dll',
'acecore.dll',
'acewstr.dll',
'acobject.dll',
'acpal.dll',
'acs_util.dll',
'activeds.dll',
'adac20b.dll',
'adapt_for_imports.dll',
'adm_core6.dll',
'adm_coreutils6.dll',
'adobexmp.dll',
'ael73.dll',
'ael73d.dll',
'aepic.dll',
'aevlsub.dll',
'afbase.dll',
'afc31.dll',
'afcore.dll',
'aftutils.dll',
'afutil.dll',
'agm.dll',
'algorithms_base_release_x64.dll',
'amqxcs2.dll',
'apexframework_x64.dll',
'api-device-config.dll',
'api-ms-win-appmodel-identity-l1-2-0.dll',
'api-ms-win-appmodel-runtime-internal-l1-1-1.dll',
'api-ms-win-appmodel-runtime-internal-l1-1-2.dll',
'api-ms-win-appmodel-runtime-l1-1-0.dll',
'api-ms-win-appmodel-runtime-l1-1-1.dll',
'api-ms-win-appmodel-state-l1-2-0.dll',
'api-ms-win-appmodel-unlock-l1-1-0.dll',
'api-ms-win-core-apiquery-l1-1-0.dll',
'api-ms-win-core-atoms-l1-1-0.dll',
'api-ms-win-core-com-l1-1-0.dll',
'api-ms-win-core-com-l1-1-1.dll',
'api-ms-win-core-com-l2-1-1.dll',
'api-ms-win-core-com-midlproxystub-l1-1-0.dll',
'api-ms-win-core-com-private-l1-1-0.dll',
'api-ms-win-core-console-l1-1-0.dll',
'api-ms-win-core-console-l2-1-0.dll',
'api-ms-win-core-crt-l1-1-0.dll',
'api-ms-win-core-crt-l2-1-0.dll',
'api-ms-win-core-datetime-l1-1-0.dll',
'api-ms-win-core-datetime-l1-1-1.dll',
'api-ms-win-core-debug-l1-1-0.dll',
'api-ms-win-core-debug-l1-1-1.dll',
'api-ms-win-core-delayload-l1-1-0.dll',
'api-ms-win-core-delayload-l1-1-1.dll',
'api-ms-win-core-errorhandling-l1-1-0.dll',
'api-ms-win-core-errorhandling-l1-1-1.dll',
'api-ms-win-core-errorhandling-l1-1-2.dll',
'api-ms-win-core-errorhandling-l1-1-3.dll',
'api-ms-win-core-featurestaging-l1-1-0.dll',
'api-ms-win-core-fibers-l1-1-0.dll',
'api-ms-win-core-fibers-l1-1-1.dll',
'api-ms-win-core-file-l1-1-0.dll',
'api-ms-win-core-file-l1-2-0.dll',
'api-ms-win-core-file-l1-2-1.dll',
'api-ms-win-core-file-l1-2-2.dll',
'api-ms-win-core-file-l2-1-0.dll',
'api-ms-win-core-file-l2-1-1.dll',
'api-ms-win-core-file-l2-1-2.dll',
'api-ms-win-core-handle-l1-1-0.dll',
'api-ms-win-core-heap-l1-1-0.dll',
'api-ms-win-core-heap-l1-2-0.dll',
'api-ms-win-core-heap-l2-1-0.dll',
'api-ms-win-core-heap-obsolete-l1-1-0.dll',
'api-ms-win-core-interlocked-l1-1-0.dll',
'api-ms-win-core-interlocked-l1-2-0.dll',
'api-ms-win-core-io-l1-1-0.dll',
'api-ms-win-core-io-l1-1-1.dll',
'api-ms-win-core-job-l1-1-0.dll',
'api-ms-win-core-job-l2-1-0.dll',
'api-ms-win-core-kernel32-legacy-l1-1-0.dll',
'api-ms-win-core-kernel32-legacy-l1-1-1.dll',
'api-ms-win-core-kernel32-private-l1-1-0.dll',
'api-ms-win-core-kernel32-private-l1-1-1.dll',
'api-ms-win-core-largeinteger-l1-1-0.dll',
'api-ms-win-core-libraryloader-l1-1-0.dll',
'api-ms-win-core-libraryloader-l1-1-1.dll',
'api-ms-win-core-libraryloader-l1-2-0.dll',
'api-ms-win-core-libraryloader-l1-2-1.dll',
'api-ms-win-core-libraryloader-l1-2-2.dll',
'api-ms-win-core-libraryloader-l2-1-0.dll',
'api-ms-win-core-localization-l1-1-0.dll',
'api-ms-win-core-localization-l1-2-0.dll',
'api-ms-win-core-localization-l1-2-1.dll',
'api-ms-win-core-localization-l1-2-2.dll',
'api-ms-win-core-localization-l2-1-0.dll',
'api-ms-win-core-localization-obsolete-l1-2-0.dll',
'api-ms-win-core-localization-private-l1-1-0.dll',
'api-ms-win-core-localregistry-l1-1-0.dll',
'api-ms-win-core-marshal-l1-1-0.dll',
'api-ms-win-core-memory-l1-1-0.dll',
'api-ms-win-core-memory-l1-1-1.dll',
'api-ms-win-core-memory-l1-1-2.dll',
'api-ms-win-core-memory-l1-1-3.dll',
'api-ms-win-core-misc-l1-1-0.dll',
'api-ms-win-core-namedpipe-l1-1-0.dll',
'api-ms-win-core-namedpipe-l1-2-0.dll',
'api-ms-win-core-namespace-l1-1-0.dll',
'api-ms-win-core-normalization-l1-1-0.dll',
'api-ms-win-core-path-l1-1-0.dll',
'api-ms-win-core-perfcounters-l1-1-0.dll',
'api-ms-win-core-privateprofile-l1-1-0.dll',
'api-ms-win-core-privateprofile-l1-1-1.dll',
'api-ms-win-core-processenvironment-l1-1-0.dll',
'api-ms-win-core-processenvironment-l1-2-0.dll',
'api-ms-win-core-processthreads-l1-1-0.dll',
'api-ms-win-core-processthreads-l1-1-1.dll',
'api-ms-win-core-processthreads-l1-1-2.dll',
'api-ms-win-core-processthreads-l1-1-3.dll',
'api-ms-win-core-processtopology-obsolete-l1-1-0.dll',
'api-ms-win-core-profile-l1-1-0.dll',
'api-ms-win-core-psapi-ansi-l1-1-0.dll',
'api-ms-win-core-psapi-l1-1-0.dll',
'api-ms-win-core-psm-app-l1-1-0.dll',
'api-ms-win-core-psm-key-l1-1-0.dll',
'api-ms-win-core-quirks-l1-1-0.dll',
'api-ms-win-core-realtime-l1-1-0.dll',
'api-ms-win-core-registry-l1-1-0.dll',
'api-ms-win-core-registry-l1-1-1.dll',
'api-ms-win-core-registry-l2-1-0.dll',
'api-ms-win-core-registryuserspecific-l1-1-0.dll',
'api-ms-win-core-rtlsupport-l1-1-0.dll',
'api-ms-win-core-rtlsupport-l1-2-0.dll',
'api-ms-win-core-shlwapi-legacy-l1-1-0.dll',
'api-ms-win-core-shlwapi-obsolete-l1-1-0.dll',
'api-ms-win-core-shutdown-l1-1-0.dll',
'api-ms-win-core-sidebyside-l1-1-0.dll',
'api-ms-win-core-string-l1-1-0.dll',
'api-ms-win-core-string-l2-1-0.dll',
'api-ms-win-core-string-l2-1-1.dll',
'api-ms-win-core-string-obsolete-l1-1-0.dll',
'api-ms-win-core-stringansi-l1-1-0.dll',
'api-ms-win-core-synch-l1-1-0.dll',
'api-ms-win-core-synch-l1-2-0.dll',
'api-ms-win-core-synch-l1-2-1.dll',
'api-ms-win-core-sysinfo-l1-1-0.dll',
'api-ms-win-core-sysinfo-l1-2-0.dll',
'api-ms-win-core-sysinfo-l1-2-1.dll',
'api-ms-win-core-sysinfo-l1-2-3.dll',
'api-ms-win-core-systemtopology-l1-1-0.dll',
'api-ms-win-core-threadpool-l1-1-0.dll',
'api-ms-win-core-threadpool-l1-2-0.dll',
'api-ms-win-core-threadpool-legacy-l1-1-0.dll',
'api-ms-win-core-threadpool-private-l1-1-0.dll',
'api-ms-win-core-timezone-l1-1-0.dll',
'api-ms-win-core-toolhelp-l1-1-0.dll',
'api-ms-win-core-url-l1-1-0.dll',
'api-ms-win-core-util-l1-1-0.dll',
'api-ms-win-core-version-l1-1-0.dll',
'api-ms-win-core-version-l1-1-1.dll',
'api-ms-win-core-windowserrorreporting-l1-1-0.dll',
'api-ms-win-core-windowserrorreporting-l1-1-1.dll',
'api-ms-win-core-winrt-error-l1-1-0.dll',
'api-ms-win-core-winrt-error-l1-1-1.dll',
'api-ms-win-core-winrt-l1-1-0.dll',
'api-ms-win-core-winrt-propertysetprivate-l1-1-1.dll',
'api-ms-win-core-winrt-robuffer-l1-1-0.dll',
'api-ms-win-core-winrt-string-l1-1-0.dll',
'api-ms-win-core-wow64-l1-1-0.dll',
'api-ms-win-core-wow64-l1-1-1.dll',
'api-ms-win-crt-conio-l1-1-0.dll',
'api-ms-win-crt-convert-l1-1-0.dll',
'api-ms-win-crt-environment-l1-1-0.dll',
'api-ms-win-crt-filesystem-l1-1-0.dll',
'api-ms-win-crt-heap-l1-1-0.dll',
'api-ms-win-crt-locale-l1-1-0.dll',
'api-ms-win-crt-math-l1-1-0.dll',
'api-ms-win-crt-multibyte-l1-1-0.dll',
'api-ms-win-crt-private-l1-1-0.dll',
'api-ms-win-crt-process-l1-1-0.dll',
'api-ms-win-crt-runtime-l1-1-0.dll',
'api-ms-win-crt-stdio-l1-1-0.dll',
'api-ms-win-crt-string-l1-1-0.dll',
'api-ms-win-crt-time-l1-1-0.dll',
'api-ms-win-crt-utility-l1-1-0.dll',
'api-ms-win-devices-config-l1-1-1.dll',
'api-ms-win-devices-query-l1-1-0.dll',
'api-ms-win-devices-query-l1-1-1.dll',
'api-ms-win-downlevel-advapi32-l1-1-0.dll',
'api-ms-win-downlevel-advapi32-l2-1-0.dll',
'api-ms-win-downlevel-kernel32-l1-1-0.dll',
'api-ms-win-downlevel-normaliz-l1-1-0.dll',
'api-ms-win-downlevel-ole32-l1-1-0.dll',
'api-ms-win-downlevel-shell32-l1-1-0.dll',
'api-ms-win-downlevel-shlwapi-l1-1-0.dll',
'api-ms-win-downlevel-shlwapi-l2-1-0.dll',
'api-ms-win-downlevel-user32-l1-1-0.dll',
'api-ms-win-downlevel-version-l1-1-0.dll',
'api-ms-win-dx-d3dkmt-l1-1-0.dll',
'api-ms-win-eventing-classicprovider-l1-1-0.dll',
'api-ms-win-eventing-consumer-l1-1-0.dll',
'api-ms-win-eventing-controller-l1-1-0.dll',
'api-ms-win-eventing-legacy-l1-1-0.dll',
'api-ms-win-eventing-provider-l1-1-0.dll',
'api-ms-win-eventing-tdh-l1-1-0.dll',
'api-ms-win-eventlog-legacy-l1-1-0.dll',
'api-ms-win-mm-time-l1-1-0.dll',
'api-ms-win-ntuser-rectangle-l1-1-0.dll',
'api-ms-win-ntuser-sysparams-l1-1-0.dll',
'api-ms-win-ole32-ie-l1-1-0.dll',
'api-ms-win-oobe-notification-l1-1-0.dll',
'api-ms-win-power-base-l1-1-0.dll',
'api-ms-win-power-setting-l1-1-0.dll',
'api-ms-win-rtcore-ntuser-clipboard-l1-1-0.dll',
'api-ms-win-rtcore-ntuser-private-l1-1-0.dll',
'api-ms-win-rtcore-ntuser-synch-l1-1-0.dll',
'api-ms-win-rtcore-ntuser-window-l1-1-0.dll',
'api-ms-win-security-accesshlpr-l1-1-0.dll',
'api-ms-win-security-activedirectoryclient-l1-1-0.dll',
'api-ms-win-security-base-l1-1-0.dll',
'api-ms-win-security-base-l1-2-0.dll',
'api-ms-win-security-capability-l1-1-0.dll',
'api-ms-win-security-credentials-l1-1-0.dll',
'api-ms-win-security-credentials-l2-1-0.dll',
'api-ms-win-security-cryptoapi-l1-1-0.dll',
'api-ms-win-security-grouppolicy-l1-1-0.dll',
'api-ms-win-security-isolatedcontainer-l1-1-0.dll',
'api-ms-win-security-lsalookup-l1-1-0.dll',
'api-ms-win-security-lsalookup-l1-1-1.dll',
'api-ms-win-security-lsalookup-l1-1-2.dll',
'api-ms-win-security-lsalookup-l2-1-0.dll',
'api-ms-win-security-lsalookup-l2-1-1.dll',
'api-ms-win-security-lsapolicy-l1-1-0.dll',
'api-ms-win-security-provider-l1-1-0.dll',
'api-ms-win-security-sddl-l1-1-0.dll',
'api-ms-win-security-systemfunctions-l1-1-0.dll',
'api-ms-win-security-trustee-l1-1-0.dll',
'api-ms-win-service-core-l1-1-0.dll',
'api-ms-win-service-core-l1-1-1.dll',
'api-ms-win-service-management-l1-1-0.dll',
'api-ms-win-service-management-l2-1-0.dll',
'api-ms-win-service-private-l1-1-0.dll',
'api-ms-win-service-winsvc-l1-1-0.dll',
'api-ms-win-service-winsvc-l1-2-0.dll',
'api-ms-win-shcore-comhelpers-l1-1-0.dll',
'api-ms-win-shcore-obsolete-l1-1-0.dll',
'api-ms-win-shcore-path-l1-1-0.dll',
'api-ms-win-shcore-registry-l1-1-0.dll',
'api-ms-win-shcore-registry-l1-1-1.dll',
'api-ms-win-shcore-scaling-l1-1-0.dll',
'api-ms-win-shcore-scaling-l1-1-1.dll',
'api-ms-win-shcore-stream-l1-1-0.dll',
'api-ms-win-shcore-stream-winrt-l1-1-0.dll',
'api-ms-win-shcore-sysinfo-l1-1-0.dll',
'api-ms-win-shcore-taskpool-l1-1-0.dll',
'api-ms-win-shcore-thread-l1-1-0.dll',
'api-ms-win-shcore-unicodeansi-l1-1-0.dll',
'api-ms-win-shell-namespace-l1-1-0.dll',
'api-ms-win-shell-shdirectory-l1-1-0.dll',
'api-ms-win-shell-shellcom-l1-1-0.dll',
'api-ms-win-shell-shellfolders-l1-1-0.dll',
'api-ms-win-shlwapi-winrt-storage-l1-1-1.dll',
'api-ms-win-stateseparation-helpers-l1-1-0.dll',
'apibasegm.dll',
'apiclient.dll',
'app.dll',
'apphelp.dll',
'application.dll',
'apputil.dll',
'appvisvsubsystems32.dll',
'appvisvsubsystems64.dll',
'appxalluserstore.dll',
'apr-iconv.dll',
'archive.dll',
'arkimage.dll',
'arkiostub.dll',
'armgrclientapi.dll',
'arrays.dll',
'arrorden.dll',
'arsp.dll',
'arst.dll',
'asciilib.dll',
'ascom10.dll',
'ashbase.dll',
'ashtask.dll',
'asio.dll',
'asl.dll',
'aslfoundation.dll',
'aswcmnbs.dll',
'aswcmnis.dll',
'aswcmnos.dll',
'aswengin.dll',
'aswengldr.dll',
'aswip.dll',
'aswlog.dll',
'aswproperty.dll',
'asynctask.dll',
'ataxsub.dll',
'atl71.dll',
'atl80.dll',
'atl90.dll',
'atl100.dll',
'atl110.dll',
'atom.dll',
'atwbxui15.dll',
'audioeng.dll',
'authz.dll',
'avcodec-52.dll',
'avcodec-55.dll',
'avcodec-56.dll',
'avcodec-57.dll',
'avcodec-58.dll',
'avcodec.dll',
'avdevice-52.dll',
'avdevice-58.dll',
'avfilter-6.dll',
'avfilter-7.dll',
'avformat-52.dll',
'avformat-55.dll',
'avformat-56.dll',
'avformat-57.dll',
'avformat-58.dll',
'avformat.dll',
'avkys.dll',
'avrt.dll',
'avtdatabase_ser.dll',
'avtdbatts.dll',
'avutil-50.dll',
'avutil-51.dll',
'avutil-52.dll',
'avutil-54.dll',
'avutil-55.dll',
'avutil-56.dll',
'avutil.dll',
'awt.dll',
'ax5api.dll',
'axe8sharedexpat.dll',
'aygshell.dll',
'backend.dll',
'bafl.dll',
'base.dll',
'basebroker2.dll',
'basez.dll',
'basic.dll',
'basicos.dll',
'basicos2.dll',
'bass.dll',
'bass_fx.dll',
'bc32fn.dll',
'bc32ui.dll',
'bc_api.dll',
'bcd.dll',
'bcgcbpro300.dll',
'bcgcbpro2430u120.dll',
'bcgcbpro2730ud141.dll',
'bcgcbpro3020.dll',
'bcp47langs.dll',
'bcrypt.dll',
'bcryptprimitives.dll',
'bds52f.dll',
'bib.dll',
'binkw32.dll',
'biosdk.dll',
'bluetoothapis.dll',
'bmm.dll',
'boom16_mtc.dll',
'boost_chrono-vc140-mt-1_60.dll',
'boost_date_time-vc140-mt-1_60.dll',
'boost_date_time.dll',
'boost_filesystem-mt.dll',
'boost_filesystem-vc140-mt-1_60.dll',
'boost_filesystem.dll',
'boost_program_options-w64r-20-1.dll',
'boost_python-vc100-mt-1_56.dll',
'boost_system-mt.dll',
'boost_system-vc120-mt-1_55.dll',
'boost_system-vc140-mt-1_60.dll',
'boost_system.dll',
'boost_thread-vc140-mt-1_60.dll',
'boost_threads.dll',
'borlndmm.dll',
'bugsplat.dll',
'bugtrap.dll',
'c0cdll1.dll',
'c0otb.dll',
'c2rui.dll',
'c4dll.dll',
'c5runx.dll',
'c55runx.dll',
'c60ascx.dll',
'c60dosx.dll',
'c60olex.dll',
'c60runx.dll',
'c60tpsx.dll',
'c_acs001.dll',
'c_atx001.dll',
'c_thgfi.dll',
'ca210_comm.dll',
'cabinet.dll',
'catafrfoundation.dll',
'catapplicationframe.dll',
'catdialogengine.dll',
'catgeometricobjects.dll',
'catgitinterfaces.dll',
'catgmgeometricinterfaces.dll',
'catgmmodelinterfaces.dll',
'catiaapplicationframe.dll',
'catinteractiveinterfaces.dll',
'catliteralfeatures.dll',
'catmathematics.dll',
'catmathstream.dll',
'catmechanicalmodeler.dll',
'catmecmodinterfaces.dll',
'catobjectmodelerbase.dll',
'catobjectmodelernavigator.dll',
'catobjectspecsmodeler.dll',
'catomx.dll',
'catplmidentificationaccess.dll',
'catproductstructure1.dll',
'catproductstructureinterfaces.dll',
'catsketcherinterfaces.dll',
'catsysts.dll',
'cattopologicalobjects.dll',
'catvisitf.dll',
'catvisualization.dll',
'catviz.dll',
'cblrtsm.dll',
'cblrtss.dll',
'cc3250mt.dll',
'cc3260mt.dll',
'cc3270mt.dll',
'cc3280mt.dll',
'cc32250mt.dll',
'cclib.dll',
'ccmcore.dll',
'cellcore.dll',
'cerlapp0471.dll',
'cerlsql0471.dll',
'cfgmgr32.dll',
'cg.dll',
'cggl.dll',
'chakracore.dll',
'chapp.dll',
'chart.dll',
'chrome_elf.dll',
'cktbl32.dll',
'claasc.dll',
'clabas.dll',
'clados.dll',
'clafm3.dll',
'clamss.dll',
'clanet.dll',
'claole.dll',
'clarun.dll',
'clatps.dll',
'clawe.dll',
'clblas.dll',
'clblast.dll',
'client_monitor.dll',
'clientapi.dll',
'clutilclasses.dll',
'cmmlib.dll',
'cmnapp.dll',
'cmnbind.dll',
'cmngen.dll',
'cmngui.dll',
'combase.dll',
'commandmanager.dll',
'commctrl.dll',
'common.dll',
'commondata.dll',
'commonlib.dll',
'commonui.dll',
'comphelp4msc.dll',
'comphelper.dll',
'comphelpmsc.dll',
'componen.dll',
'componentslib.dll',
'comppkgsup.dll',
'concrt140.dll',
'concrt140_app.dll',
'config.dll',
'confint.dll',
'conpastilla.dll',
'contcype.dll',
'controls.dll',
'cooltype.dll',
'coom14_mtc.dll',
'core.dll',
'core4.dll',
'core83.dll',
'core_rl_magick_.dll',
'core_rl_magickcore_.dll',
'coreapp.dll',
'coredll.dll',
'corefoundation.dll',
'coreint.dll',
'corelocalization.dll',
'coremanager.dll',
'coremessaging.dll',
'coretime.dll',
'coreuicomponents.dll',
'cp3245mt.dll',
'cpd3core.dll',
'cpd3datacore.dll',
'cpptools4.dll',
'cppu3.dll',
'cppuhelper3msc.dll',
'crashhandler.dll',
'crashreport.dll',
'crashrpt.dll',
'crashrpt1402.dll',
'crashrpt1403.dll',
'credui.dll',
'crlcomponent.dll',
'crlfrmwk.dll',
'crlmath.dll',
'crlresources.dll',
'crlutils.dll',
'crlutl.dll',
'crpe32.dll',
'crypt32.dll',
'cryptbase.dll',
'cryptngc.dll',
'cryptsp.dll',
'cryptui.dll',
'cryptxml.dll',
'cs200_usbcomm.dll',
'cscapi.dll',
'csi.dll',
'ctlapi.dll',
'ctreestd.dll',
'cvappmgr.dll',
'cvarchive.dll',
'cvbasiclib.dll',
'cvdatapipe.dll',
'cvfocus.dll',
'cvirte.dll',
'cvjobclient.dll',
'cvjobquery.dll',
'cvlib.dll',
'cvmmclientapi.dll',
'cvperformancemonitorlib.dll',
'cvsession.dll',
'cvxmlmsgsbase.dll',
'cw32core.dll',
'cw3230.dll',
'cw_main.dll',
'cwcommon.dll',
'cwdatabase.dll',
'cwhhla.dll',
'cwqtlib.dll',
'cximage.dll',
'cximagecrtu.dll',
'cxlibw-5-0.dll',
'cxxwrap_julia.dll',
'cyassert.dll',
'cygbabl-0.1-0.dll',
'cyggcc_s-1.dll',
'cyggcc_s-seh-1.dll',
'cyggegl-0.2-0.dll',
'cyggegl-0.4-0.dll',
'cygglib-2.0-0.dll',
'cyggobject-2.0-0.dll',
'cygiconv-2.dll',
'cygintl-8.dll',
'cygkritaglobal-16.dll',
'cygkritaimage-16.dll',
'cygkritaui-16.dll',
'cygqtcore-4.dll',
'cygruby200.dll',
'cygstdc++-6.dll',
'cygwin1.dll',
'cygwin10.dll',
'cygx11-6.dll',
'cygxi-6.dll',
'cygz.dll',
'cygznc-1.7.5.dll',
'cypedir.dll',
'cypeio.dll',
'cypemath.dll',
'cypemem.dll',
'cypemsgs.dll',
'cypestr.dll',
'd2d1.dll',
'd3d8.dll',
'd3d9.dll',
'd3d10.dll',
'd3d10_1.dll',
'd3d11.dll',
'd3d12.dll',
'd3dcompiler_43.dll',
'd3dcompiler_47.dll',
'd3dx9_26.dll',
'd3dx9_30.dll',
'd3dx9_40.dll',
'd3dx9_42.dll',
'd3dx9_43.dll',
'd3dx10_43.dll',
'd3dx11_43.dll',
'd3dxof.dll',
'dacommon.dll',
'dalog.dll',
'dastock.dll',
'data.dll',
'database.dll',
'databasemanager.dll',
'datacenter.dll',
'datahelpers.dll',
'datalayer.dll',
'daui.dll',
'dautil.dll',
'dbgeng.dll',
'dbghelp.dll',
'dclibxml2.dll',
'dclipx.dll',
'dcomp.dll',
'dcp-1.0.dll',
'dcpomatic2.dll',
'ddimage4.0.dll',
'ddraw.dll',
'deng_core.dll',
'deploy.dll',
'detoured.dll',
'deviceassociation.dll',
'devil.dll',
'devmgr.dll',
'devobj.dll',
'devsdk_base_release_x64.dll',
'devsdk_data_avol_release_x64.dll',
'devsdk_data_release_x64.dll',
'dhcpcsvc.dll',
'dhcpcsvc6.dll',
'di0panv2.dll',
'dialogcommon.dll',
'difxapi.dll',
'dinput.dll',
'dinput8.dll',
'dll_loader.dll',
'dmcmnutils.dll',
'doc.dll',
'dplayx.dll',
'drs32.dll',
'drvstore.dll',
'dsbacr32.dll',
'dsbaf32.dll',
'dsbas32.dll',
'dsdacl32.dll',
'dsetup.dll',
'dsintr32.dll',
'dsparse.dll',
'dsreg.dll',
'dsrole.dll',
'dssabc32.dll',
'dssys32.dll',
'dssysu32.dll',
'dui70.dll',
'duifw.dll',
'duilib.dll',
'duser.dll',
'dvacore.dll',
'dvamediatypes.dll',
'dvaui.dll',
'dvaunittesting.dll',
'dvaworkspace.dll',
'dwbase.dll',
'dwrite.dll',
'dwutility.dll',
'dxgi.dll',
'dxva2.dll',
'dynamiclink.dll',
'dynamorio.dll',
'eappcfg.dll',
'eax.dll',
'ecommon.dll',
'edgeiso.dll',
'editormodel.dll',
'editorserialization.dll',
'eesofcore.dll',
'efsadu.dll',
'efsrv.dll',
'efsutil.dll',
'elementcommon.dll',
'elscore.dll',
'emmisc.dll',
'encoding-conversion.dll',
'engine.dll',
'entograf.dll',
'env100.dll',
'env200.dll',
'ercoreclbbase471.dll',
'ercoreclbmemory471.dll',
'err_base.dll',
'errorlog.dll',
'errorreport.dll',
'errortrace.dll',
'ersyscclbsystem471.dll',
'ersysdclbrecsystem471.dll',
'ersystclbcomutil471.dll',
'ersystclbdataaccessmgr471.dll',
'euser.dll',
'event_manager.dll',
'event_routing.dll',
'evr.dll',
'evtaskapi.dll',
'exsec32.dll',
'ext-ms-onecore-appmodel-staterepository-cache-l1-1-0.dll',
'ext-ms-win-com-ole32-l1-1-0.dll',
'ext-ms-win-com-ole32-l1-1-1.dll',
'ext-ms-win-core-iuri-l1-1-0.dll',
'ext-ms-win-devmgmt-policy-l1-1-0.dll',
'ext-ms-win-devmgmt-policy-l1-1-1.dll',
'ext-ms-win-edputil-policy-l1-1-0.dll',
'ext-ms-win-els-elscore-l1-1-0.dll',
'ext-ms-win-feclient-encryptedfile-l1-1-0.dll',
'ext-ms-win-gdi-dc-create-l1-1-0.dll',
'ext-ms-win-gdi-dc-l1-2-0.dll',
'ext-ms-win-gdi-draw-l1-1-0.dll',
'ext-ms-win-gdi-draw-l1-1-1.dll',
'ext-ms-win-gdi-font-l1-1-0.dll',
'ext-ms-win-kernel32-package-l1-1-0.dll',
'ext-ms-win-mrmcorer-resmanager-l1-1-0.dll',
'ext-ms-win-networking-wlanapi-l1-1-0.dll',
'ext-ms-win-ntuser-dialogbox-l1-1-0.dll',
'ext-ms-win-ntuser-draw-l1-1-0.dll',
'ext-ms-win-ntuser-gui-l1-1-0.dll',
'ext-ms-win-ntuser-keyboard-l1-1-0.dll',
'ext-ms-win-ntuser-message-l1-1-0.dll',
'ext-ms-win-ntuser-message-l1-1-1.dll',
'ext-ms-win-ntuser-misc-l1-1-0.dll',
'ext-ms-win-ntuser-private-l1-1-1.dll',
'ext-ms-win-ntuser-rectangle-ext-l1-1-0.dll',
'ext-ms-win-ntuser-synch-l1-1-0.dll',
'ext-ms-win-ntuser-uicontext-ext-l1-1-0.dll',
'ext-ms-win-ntuser-window-l1-1-0.dll',
'ext-ms-win-ntuser-window-l1-1-1.dll',
'ext-ms-win-ntuser-window-l1-1-2.dll',
'ext-ms-win-ntuser-window-l1-1-3.dll',
'ext-ms-win-ntuser-window-l1-1-4.dll',
'ext-ms-win-ntuser-windowclass-l1-1-0.dll',
'ext-ms-win-ntuser-windowstation-l1-1-0.dll',
'ext-ms-win-ole32-bindctx-l1-1-0.dll',
'ext-ms-win-rtcore-gdi-devcaps-l1-1-0.dll',
'ext-ms-win-rtcore-gdi-object-l1-1-0.dll',
'ext-ms-win-rtcore-gdi-rgn-l1-1-0.dll',
'ext-ms-win-rtcore-ntuser-cursor-l1-1-0.dll',
'ext-ms-win-rtcore-ntuser-dc-access-l1-1-0.dll',
'ext-ms-win-rtcore-ntuser-synch-ext-l1-1-0.dll',
'ext-ms-win-rtcore-ntuser-syscolors-l1-1-0.dll',
'ext-ms-win-rtcore-ntuser-sysparams-l1-1-0.dll',
'ext-ms-win-rtcore-ntuser-window-ext-l1-1-0.dll',
'ext-ms-win-security-credui-l1-1-0.dll',
'ext-ms-win-session-usermgr-l1-1-0.dll',
'ext-ms-win-session-usertoken-l1-1-0.dll',
'ext-ms-win-session-winsta-l1-1-0.dll',
'ext-ms-win-session-wtsapi32-l1-1-0.dll',
'ext-ms-win-shell-shell32-l1-2-0.dll',
'ext-ms-win-shell-shell32-l1-2-1.dll',
'ext-ms-win-shell32-shellfolders-l1-1-0.dll',
'ext-ms-win-uxtheme-themes-l1-1-0.dll',
'ext-ms-win-wer-reporting-l1-1-0.dll',
'ext-ms-win-wevtapi-eventlog-l1-1-0.dll',
'ext_iccore73.dll',
'ext_iccore73d.dll',
'extensionsystem.dll',
'extensionsystem4.dll',
'f3biio.dll',
'f3bilpio.dll',
'f3biprct.dll',
'fastprox.dll',
'faultrep.dll',
'favorites.dll',
'fbclient.dll',
'fdframework.dll',
'fdrextensions.dll',
'fdrplugin.dll',
'ffmpeg.dll',
'ffmpegsumo.dll',
'ffwrapper.dll',
'fineobj.dll',
'firewallapi.dll',
'fltlib.dll',
'fmod.dll',
'fmod_event.dll',
'fmodex.dll',
'fontsub.dll',
'foundation.dll',
'framedynos.dll',
'framework.dll',
'frameworkextensions.dll',
'freecadapp.dll',
'freecadbase.dll',
'freecadgui.dll',
'freeimage.dll',
'freetype.dll',
'friextensions.dll',
'ftd2xx.dll',
'functiondialog.dll',
'fury3_mtc.dll',
'fwpuclnt.dll',
'gamedata.dll',
'gdacutl.dll',
'gdal204.dll',
'gemrb_core.dll',
'gemx_nvt73d.dll',
'gendata73d.dll',
'geom.dll',
'geometry.dll',
'gf.dll',
'gfsdk_aftermath_lib.x64.dll',
'gfx.dll',
'gimp-1.1.dll',
'glew32.dll',
'glib-2.0.dll',
'globals.dll',
'glog.dll',
'glu32.dll',
'glut32.dll',
'gmsecapi.dll',
'googledesktopcommon.dll',
'gpu_solver.dll',
'granny2.dll',
'graphic.dll',
'graphicdata.dll',
'graphics.dll',
'groovenew.dll',
'grooveutil.dll',
'gsdmain.dll',
'gsiou.dll',
'gsl73.dll',
'gsl73d.dll',
'gsroot.dll',
'gufuncs.dll',
'gui.dll',
'gui_oiv.dll',
'guibase.dll',
'guicore.dll',
'happy.dll',
'hccutils.dll',
'hdf5.dll',
'hid.dll',
'hlink.dll',
'hlog.dll',
'hpr.dll',
'htmlayout.dll',
'httpapi.dll',
'i3core-w64r-20-1.dll',
'i18nisolang1msc.dll',
'i18nlangtag.dll',
'ibmtss.dll',
'icdbif73.dll',
'icdbif73d.dll',
'icmain73.dll',
'icmain73d.dll',
'icmp.dll',
'iconv.dll',
'icsarith.dll',
'icsform.dll',
'icsgenl.dll',
'icudt55.dll',
'icudt56.dll',
'icuin55.dll',
'icuin56.dll',
'icuin63.dll',
'icuin64.dll',
'icuin65.dll',
'icuio63.dll',
'icuio64.dll',
'icuio65.dll',
'icuserc73.dll',
'icuserc73d.dll',
'icutu43.dll',
'icuuc40.dll',
'icuuc43.dll',
'icuuc55.dll',
'icuuc56.dll',
'icuuc58.dll',
'icuuc63.dll',
'icuuc64.dll',
'icuuc65.dll',
'ieadvpack.dll',
'ieframe.dll',
'iertutil.dll',
'ieshims.dll',
'ieui.dll',
'igx.dll',
'ihszras.dll',
'ijl15.dll',
'iliuni32.dll',
'image.dll',
'imagerenderer.dll',
'imgutil.dll',
'imm32.dll',
'imsconnect.dll',
'inetcomm.dll',
'instapi110.dll',
'intl.dll',
'inxs.dll',
'iocptcp.dll',
'iocpudp.dll',
'iolibu.dll',
'iopersisteddataaccess.dll',
'ipc.dll',
'irisctl.dll',
'isxutils.dll',
'itaxsub.dll',
'iup.dll',
'iuplua52.dll',
'j9thr26.dll',
'jansson.dll',
'java.dll',
'jawt.dll',
'jbase5.dll',
'jdproxy.dll',
'js0fm.dll',
'js0group.dll',
'js32.dll',
'jsoncpp.dll',
'juli.dll',
'jvm.dll',
'kdownload.dll',
'kelib.dll',
'kernel.dll',
'kernelbase.dll',
'kernelutil.dll',
'kf5configcore.dll',
'kf5coreaddons.dll',
'kf5i18n.dll',
'kf5kiocore.dll',
'kf5purpose.dll',
'knowledgeitf.dll',
'kool_ade.dll',
'kpathsea.dll',
'kpathsea600.dll',
'kpathsea630.dll',
'kpathsea630w64.dll',
'ksffoundation.dll',
'kso.dll',
'ksolite.dll',
'ktmw32.dll',
'ku_http.dll',
'kwdatadef.dll',
'kwlib.dll',
'kwlog.dll',
'kwmodconfig.dll',
'kwmusiccore.dll',
'labcontrols.dll',
'labgen.dll',
'labutils.dll',
'language.dll',
'languages.dll',
'lay100.dll',
'lay200.dll',
'ldap60.dll',
'lgpllibs.dll',
'lib.io.char.dll',
'lib.stdc.dll',
'lib.syslog.dll',
'libabiword-3.0.dll',
'libadm_core6.dll',
'libadm_coreaudio6.dll',
'libadm_coreaudioencoder6.dll',
'libadm_coredemuxer6.dll',
'libadm_coreimage6.dll',
'libadm_coremuxer6.dll',
'libadm_coreui6.dll',
'libadm_coreutils6.dll',
'libadm_corevideoencoder6.dll',
'libadm_corevideofilter6.dll',
'libadm_uiqt56.dll',
'libapr-1.dll',
'libapriconv-1-0.dll',
'libapriconv-1.dll',
'libaprutil-1.dll',
'libasapmgr-mingw.dll',
'libatk-1.0-0.dll',
'libbabl-0.1-0.dll',
'libbase_utils.dll',
'libbind9.dll',
'libboost_filesystem-mt.dll',
'libboost_system-mt.dll',
'libboost_thread_win32-mt.dll',
'libbz2-1.dll',
'libcairo-2.dll',
'libcamel-1.2-19.dll',
'libcapstone.dll',
'libcasadi.dll',
'libcef.dll',
'libcocos2d.dll',
'libcrypto-1_1-x64.dll',
'libcrypto-1_1.dll',
'libcrypto10.dll',
'libcurl-4.dll',
'libcurl.dll',
'libcxxwrap_julia.dll',
'libdarktable.dll',
'libdns.dll',
'libeay32.dll',
'libecore-1.dll',
'libedataserver-1.2-14.dll',
'libedataserverui-1.2-11.dll',
'libegl.dll',
'libeina-1.dll',
'libenchant.dll',
'libeutil-0.dll',
'libevas-1.dll',
'libexpat-1.dll',
'libexpat.dll',
'libfbxsdk.dll',
'libfclasses.dll',
'libfontconfig-1.dll',
'libfreetype-6.dll',
'libgcc_s_dw2-1.dll',
'libgcc_s_seh-1.dll',
'libgcc_s_sjlj-1.dll',
'libgcrypt-20.dll',
'libgdk-3-0.dll',
'libgdk-win32-2.0-0.dll',
'libgdk_pixbuf-2.0-0.dll',
'libgegl-0.2-0.dll',
'libgegl-0.3-0.dll',
'libgegl-0.4-0.dll',
'libgfortran-3.dll',
'libgimp-2.0-0.dll',
'libgimpbase-2.0-0.dll',
'libgimpcolor-2.0-0.dll',
'libgimpui-2.0-0.dll',
'libgimpwidgets-2.0-0.dll',
'libgio-2.0-0.dll',
'libglesv2.dll',
'libglib-2.0-0.dll',
'libglibmm-2.4-1.dll',
'libgmodule-2.0-0.dll',
'libgmp-10.dll',
'libgnutls-30.dll',
'libgobject-2.0-0.dll',
'libgomp-1.dll',
'libgpac.dll',
'libgpg-error-0.dll',
'libgphoto2-6.dll',
'libgphoto2_port-12.dll',
'libgraphicsmagick-3.dll',
'libgrass_dbmibase.7.7.dll',
'libgrass_gis.7.4.0.dll',
'libgrass_gis.7.4.4.dll',
'libgrass_gis.7.6.0.dll',
'libgrass_gis.7.6.dll',
'libgrass_gis.7.7.dll',
'libgrass_raster.7.4.0.dll',
'libgrass_raster.7.7.dll',
'libgrass_vector.7.7.dll',
'libgstaudio-1.0-0.dll',
'libgstbase-0.10-0.dll',
'libgstbase-1.0-0.dll',
'libgstbase-1.5-0.dll',
'libgstpbutils-1.0-0.dll',
'libgstreamer-0.10-0.dll',
'libgstreamer-1.0-0.dll',
'libgstreamer-1.5-0.dll',
'libgsttag-1.0-0.dll',
'libgstvideo-1.0-0.dll',
'libgtk-3-0.dll',
'libgtk-win32-2.0-0.dll',
'libguide40.dll',
'libgwyapp2-0.dll',
'libgwyddion2-0.dll',
'libgwymodule2-0.dll',
'libgwyprocess2-0.dll',
'libharfbuzz-0.dll',
'libhpdf_140.dll',
'libhttpd.dll',
'libiconv-2.dll',
'libifcoremd.dll',
'libifportmd.dll',
'libindexing.dll',
'libintl-8.dll',
'libintl.dll',
'libiomp5md.dll',
'libisc.dll',
'libisccfg.dll',
'libite-mingw.dll',
'libitkcommon-4.13.dll',
'libitkioimagebase-4.13.dll',
'libitksys-4.13.dll',
'libitkvnl-4.13.dll',
'libjam.dll',
'libjpeg-8.dll',
'libjpeg-62.dll',
'libkdecore.dll',
'libkdeui.dll',
'libkdevplatforminterfaces.dll',
'libkf5configcore.dll',
'libkf5coreaddons.dll',
'libkf5i18n.dll',
'libkf5widgetsaddons.dll',
'libkio.dll',
'libkritacommand.dll',
'libkritaflake.dll',
'libkritaglobal.dll',
'libkritaimage.dll',
'libkritapigment.dll',
'libkritaui.dll',
'libkritawidgets.dll',
'libkritawidgetutils.dll',
'libldns-2.dll',
'liblogicalaccess.dll',
'liblzo2-2.dll',
'libmagickcore-7.q16hdri-5.dll',
'libmagickcore-7.q16hdri-6.dll',
'libmex.dll',
'libmitsuba-core.dll',
'libmitsuba-render.dll',
'libmlt-6.dll',
'libmmd.dll',
'libmpr.dll',
'libmwfl.dll',
'libmwi18n.dll',
'libmwservices.dll',
'libmx.dll',
'libmysql.dll',
'libnbbase.dll',
'libnettle-6.dll',
'libnspr4.dll',
'libnwcore.dll',
'libnwshared.dll',
'libopenblas.dll',
'libopencv_calib3d341.dll',
'libopencv_calib3d342.dll',
'libopencv_core340.dll',
'libopencv_core341.dll',
'libopencv_core342.dll',
'libopencv_core343.dll',
'libopencv_core400.dll',
'libopencv_core401.dll',
'libopencv_core411.dll',
'libopencv_datasets341.dll',
'libopencv_datasets342.dll',
'libopencv_features2d342.dll',
'libopencv_highgui340.dll',
'libopencv_highgui341.dll',
'libopencv_highgui342.dll',
'libopencv_highgui343.dll',
'libopencv_highgui400.dll',
'libopencv_highgui401.dll',
'libopencv_highgui411.dll',
'libopencv_imgcodecs341.dll',
'libopencv_imgcodecs342.dll',
'libopencv_imgcodecs343.dll',
'libopencv_imgcodecs400.dll',
'libopencv_imgcodecs401.dll',
'libopencv_imgcodecs411.dll',
'libopencv_imgproc341.dll',
'libopencv_imgproc342.dll',
'libopencv_imgproc343.dll',
'libopencv_imgproc400.dll',
'libopencv_imgproc401.dll',
'libopencv_imgproc411.dll',
'libopencv_video342.dll',
'libopencv_videoio342.dll',
'libopencv_videoio343.dll',
'libopenthreads.dll',
'libopenthreadsd.dll',
'liborg_blueberry_osgi.dll',
'liborg_blueberry_ui.dll',
'libosg.dll',
'libosgd.dll',
'libosgdb.dll',
'libosgdbd.dll',
'libosgearth.dll',
'libosgearthd.dll',
'libosgearthutil.dll',
'libosgga.dll',
'libosggad.dll',
'libosgtext.dll',
'libosgutil.dll',
'libosgutild.dll',
'libosgviewer.dll',
'libosgviewerd.dll',
'libosgwidget.dll',
'libp3dtool.dll',
'libp3dtoolconfig.dll',
'libpal.dll',
'libpanda.dll',
'libpandaegg.dll',
'libpandaexpress.dll',
'libpango-1.0-0.dll',
'libpangocairo-1.0-0.dll',
'libpart.dll',
'libpcre-1.dll',
'libpcre.dll',
'libpidgin.dll',
'libpixman-1-0.dll',
'libplc4.dll',
'libplds4.dll',
'libpng16-16.dll',
'libpng16.dll',
'libpom.dll',
'libportability.dll',
'libpq.dll',
'libprotobuf.dll',
'libpulsecommon-7.1.dll',
'libpulsecore-7.1.dll',
'libpurple.dll',
'libpython2.7.dll',
'libpython3.6m.dll',
'libpython3.7m.dll',
'libpython3.8m.dll',
'libqt5core.dll',
'libqt5gui.dll',
'libquadmath-0.dll',
'librazorcat-mingw.dll',
'libredwg-0.dll',
'libreq.dll',
'librudiments.dll',
'libsasl.dll',
'libsndfile-1.dll',
'libsnooper.dll',
'libsodium.dll',
'libspreadsheet-1-10-17.dll',
'libsqlite3-0.dll',
'libsqlite3.dll',
'libssh2.dll',
'libssl-1_1-x64.dll',
'libssl-1_1.dll',
'libssp-0.dll',
'libstdc++-6.dll',
'libsti32.dll',
'libsvn_delta-1.dll',
'libsvn_subr-1.dll',
'libswipl.dll',
'libsyss.dll',
'libsystre-0.dll',
'libtc.dll',
'libtccore.dll',
'libtcinit.dll',
'libtcod.dll',
'libtiff-5.dll',
'libtitania.dll',
'libtulip-core-4.8.dll',
'libtulip-core-4.9.dll',
'libtulip-core-4.10.dll',
'libtulip-core-5.0.dll',
'libtulip-core-5.2.dll',
'libtulip-core-5.3.dll',
'libtulip-gui-4.8.dll',
'libtulip-ogl-4.8.dll',
'libtulip-ogl-5.2.dll',
'libtypetable-mingw.dll',
'libugutils.dll',
'libunity_core_shared.dll',
'libusb-1.0.dll',
'libusb0.dll',
'libut.dll',
'libuv.dll',
'libvlc.dll',
'libvlccore.dll',
'libvorbisfile.dll',
'libvorbisfile_64.dll',
'libvtkcommoncore-8.1.dll',
'libvtkcommoncorepython27d-8.1.dll',
'libvtkcommondatamodel-8.1.dll',
'libvtkcommondatamodelpython27d-8.1.dll',
'libvtkcommonexecutionmodel-8.1.dll',
'libvtkcommonexecutionmodelpython27d-8.1.dll',
'libvtkcommonmath-8.1.dll',
'libvtkcommonmisc-8.1.dll',
'libvtkcommonsystem-8.1.dll',
'libvtkcommontransforms-8.1.dll',
'libvtkfilterscore-8.1.dll',
'libvtkfiltersgeneral-8.1.dll',
'libvtkfilterssources-8.1.dll',
'libvtkimagingcore-8.1.dll',
'libvtkiocore-8.1.dll',
'libvtkrenderingcore-8.1.dll',
'libvtkrenderingcorepython27d-8.1.dll',
'libvtksys-8.1.dll',
'libvtkwrappingpython27core-8.1.dll',
'libwinpthread-1.dll',
'libwireshark.dll',
'libwsutil.dll',
'libxl.dll',
'libxmccore.dll',
'libxml-mingw.dll',
'libxml2-2.dll',
'libxml2.dll',
'libxmlsec.dll',
'libxslt.dll',
'license.dll',
'lima-common-data.dll',
'lima-common-factory.dll',
'lima-common-fsaaccess.dll',
'lima-common-mediaprocessors.dll',
'lima-common-mediaticdata.dll',
'lima-common-misc.dll',
'lima-common-processunitframework.dll',
'lima-common-time.dll',
'lima-common-tools.dll',
'lima-common-xmlconfigurationfiles.dll',
'lima-lp-analysisdict.dll',
'lima-lp-analysishandlers.dll',
'lima-lp-annotationgraph.dll',
'lima-lp-automaton.dll',
'lima-lp-bagofwords.dll',
'lima-lp-client.dll',
'lima-lp-flattokenizer.dll',
'lima-lp-lineartextrepresentation.dll',
'lima-lp-linguisticanalysisstructure.dll',
'lima-lp-linguisticdata.dll',
'lima-lp-linguisticprocessors.dll',
'lima-lp-linguisticresources.dll',
'lima-lp-misc.dll',
'lima-lp-propertycode.dll',
'lima-lp-specificentities.dll',
'lima-lp-syntacticanalysis.dll',
'lima-lp-textsegmentation.dll',
'linkinfo.dll',
'livelog.dll',
'locale.dll',
'log.dll',
'log4cplus.dll',
'log4cplusd.dll',
'log4cxx.dll',
'logger.dll',
'logging.dll',
'loggingplatform.dll',
'logmanager.dll',
'logoncli.dll',
'ltkrn14n.dll',
'ltkrn15u.dll',
'ltkrnu.dll',
'lua.dll',
'lua5.1.dll',
'lua51.dll',
'lua53.dll',
'lw.dll',
'lxgui.dll',
'mahrctl.dll',
'mahrtom.dll',
'mahruser.dll',
'mapi32.dll',
'marbase.dll',
'math.dll',
'maxapi.dll',
'maxutil.dll',
'mcereghandler.dll',
'mdaelib.dll',
'mediafoundation.dll',
'mediainfo.dll',
'mediatypes.dll',
'memory.dll',
'mergedlo.dll',
'mesh.dll',
'message.dll',
'messagepublisher.dll',
'mf.dll',
'mfc42d.dll',
'mfc70.dll',
'mfc70u.dll',
'mfc71.dll',
'mfc71u.dll',
'mfc80.dll',
'mfc80u.dll',
'mfc80xu.dll',
'mfc90.dll',
'mfc90u.dll',
'mfc100.dll',
'mfc100u.dll',
'mfc110.dll',
'mfc110u.dll',
'mfc120.dll',
'mfc120u.dll',
'mfc140.dll',
'mfc140d.dll',
'mfc140u.dll',
'mfc140ud.dll',
'mfcce300.dll',
'mfco42d.dll',
'mfplat.dll',
'mfplay.dll',
'mfreadwrite.dll',
'mgmtapi.dll',
'mi.dll',
'miktex209-app.dll',
'miktex209-core.dll',
'miktex209-next-app.dll',
'miktex209-next-core.dll',
'miktex209-next-util.dll',
'miktex209-util.dll',
'mingwm10.dll',
'minizip.dll',
'misc.dll',
'mitkcoreext.dll',
'mkl_rt.dll',
'mlang.dll',
'mmdevapi.dll',
'mmfs2.dll',
'mmmsgbundle.dll',
'mobilephoneenv.dll',
'module_lifetime.dll',
'module_structures.dll',
'mosaiccore.dll',
'mosifs32.dll',
'movaviio.dll',
'mozalloc.dll',
'mozavutil.dll',
'mozglue.dll',
'mqtutil.dll',
'mqutil.dll',
'mrmcorer.dll',
'mrt100_app.dll',
'mrtrix-365b606c17c8f6f68f476f1f567a184afcddcb5c.dll',
'msasn1.dll',
'mscoree.dll',
'msdart.dll',
'msdmo.dll',
'msdrm.dll',
'msfeeds.dll',
'mshtml.dll',
'msi.dll',
'msiltcfg.dll',
'msl_all-dll90_x86.dll',
'msls31.dll',
'mso.dll',
'mso20imm.dll',
'mso20win32client.dll',
'mso30imm.dll',
'mso30win32client.dll',
'mso40uiwin32client.dll',
'mso97.dll',
'mso98win32client.dll',
'mso99lwin32client.dll',
'msocf.dll',
'mss32.dll',
'mstores.dll',
'msvbvm50.dll',
'msvcp70.dll',
'msvcp71.dll',
'msvcp90.dll',
'msvcp90d.dll',
'msvcp100.dll',
'msvcp100d.dll',
'msvcp110.dll',
'msvcp110_win.dll',
'msvcp110d.dll',
'msvcp120.dll',
'msvcp120_app.dll',
'msvcp120d.dll',
'msvcp120d_app.dll',
'msvcp140.dll',
'msvcp140_app.dll',
'msvcp140d.dll',
'msvcp140d_app.dll',
'msvcp_win.dll',
'msvcr71.dll',
'msvcr71d.dll',
'msvcr80d.dll',
'msvcr90.dll',
'msvcr90d.dll',
'msvcr100.dll',
'msvcr100d.dll',
'msvcr110.dll',
'msvcr110d.dll',
'msvcr120.dll',
'msvcr120_app.dll',
'msvcr120_clr0400.dll',
'msvcr120d.dll',
'msvcrt-ruby240.dll',
'msvcrt-ruby250.dll',
'msvcrt-ruby260.dll',
'msvcrt20.dll',
'msvcrtd.dll',
'msys-2.0.dll',
'msys-gcc_s-1.dll',
'msys-iconv-2.dll',
'msys-intl-8.dll',
'msys-python3.6m.dll',
'msys-ruby240.dll',
'msys-ruby260.dll',
'msys-svn_subr-1-0.dll',
'msys-z.dll',
'msys-znc-1.7.dll',
'mtslib.dll',
'mvcl14n.dll',
'mycom.dll',
'nagscreen.dll',
'namedobjects.dll',
'ncaaudiodev.dll',
'ncobjapi.dll',
'ncrypt.dll',
'ndfapi.dll',
'net.dll',
'netbw32.dll',
'netrap.dll',
'netutils.dll',
'network.dll',
'newdev.dll',
'nghttp2.dll',
'ninput.dll',
'nitroplatform12.dll',
'nitroplatform13.dll',
'nlsreportgenerator4807.dll',
'nnotes.dll',
'node.dll',
'normaliz.dll',
'npdf.dll',
'npparstb.dll',
'npremodule.dll',
'nscp_protobuf.dll',
'nsi.dll',
'nss3.dll',
'nssutil3.dll',
'nsw2lib.dll',
'ntdsapi.dll',
'ntshrui.dll',
'nuke83.dll',
'nvcloth_x64.dll',
'nvcuda.dll',
'nw_elf.dll',
'o3pubfunc.dll',
'oart.dll',
'oartodf.dll',
'objc.dll',
'objectmodelersystem.dll',
'obs.dll',
'occache.dll',
'oci.dll',
'odbc32.dll',
'odbccp32.dll',
'og81as.dll',
'og701asuc.dll',
'ogg.dll',
'oglmanager.dll',
'ogremain.dll',
'ogshell.dll',
'ogutil.dll',
'oisapp.dll',
'olepro32.dll',
'olmapi32.dll',
'opa.dll',
'openal32.dll',
'opencl.dll',
'openflipperpluginlib.dll',
'opengl32.dll',
'openglswitcherapi.dll',
'openmaya.dll',
'openmayaanim.dll',
'openvr_api.dll',
'oplib.dll',
'os.dll',
'osf.dll',
'osfshared.dll',
'osfui.dll',
'osii_messaging_c.dll',
'osii_sarc_c.dll',
'osii_system_c.dll',
'osppc.dll',
'osppcext.dll',
'otgdllview.dll',
'outlrpc.dll',
'owl52f.dll',
'packet.dll',
'palsstorage.dll',
'paramblk2.dll',
'parto.dll',
'pastatus.dll',
'pbvm90.dll',
'pbvm125.dll',
'pbvm170.dll',
'pcre.dll',
'pcwum.dll',
'pd.dll',
'pdfcore.dll',
'pdh.dll',
'perl528.dll',
'pgort140.dll',
'php5ts.dll',
'php7.dll',
'php7ts.dll',
'php8.dll',
'php8ts.dll',
'physx3_x64.dll',
'physx3_x86.dll',
'physx3characterkinematicdebug_x86.dll',
'physx3common_x64.dll',
'physx3common_x86.dll',
'physx3cooking_x64.dll',
'physxloader.dll',
'pig327.dll',
'plds4.dll',
'plib32.dll',
'pmruntime.dll',
'pncrt.dll',
'pocofoundation.dll',
'policymanager.dll',
'postproc-55.dll',
'powrprof.dll',
'ppcore.dll',
'pr0ctls.dll',
'pr0defs.dll',
'pr0rcci.dll',
'prgbase.dll',
'prgcore.dll',
'prldap60.dll',
'prm.dll',
'prntvpt.dll',
'procint.dll',
'procommon.dll',
'productutilities.dll',
'profapi.dll',
'progreso.dll',
'progressivehullcudalib.dll',
'projectexplorer4.dll',
'propsys.dll',
'protocol.dll',
'pslutils.dll',
'psplog.dll',
'psxlib.dll',
'pthread.dll',
'pthreadgc2.dll',
'pthreadvc2.dll',
'ptimer.dll',
'ptrace34.dll',
'public.dll',
'pubsub.dll',
'pxfoundation_x64.dll',
'pxpvdsdk_x64.dll',
'pyside2.abi3.dll',
'python3.dll',
'python23.dll',
'python24.dll',
'python25.dll',
'python26.dll',
'python27.dll',
'python32.dll',
'python33.dll',
'python34.dll',
'python35.dll',
'python36.dll',
'python37.dll',
'python38.dll',
'python39.dll',
'pywintypes27.dll',
'pywintypes39.dll',
'qbutilities.dll',
'qinetwork.dll',
'qiutils.dll',
'qoom19_mtc.dll',
'qqlivebase.dll',
'qqmusiccommon.dll',
'qt5concurrent.dll',
'qt5core.dll',
'qt5cored.dll',
'qt5dbus.dll',
'qt5designer.dll',
'qt5gui.dll',
'qt5guid.dll',
'qt5help.dll',
'qt5location.dll',
'qt5multimedia.dll',
'qt5multimediad.dll',
'qt5multimediawidgets.dll',
'qt5network.dll',
'qt5networkd.dll',
'qt5opengl.dll',
'qt5positioning.dll',
'qt5positioningd.dll',
'qt5printsupport.dll',
'qt5qml.dll',
'qt5qmld.dll',
'qt5quick.dll',
'qt5quickcontrols2.dll',
'qt5quickd.dll',
'qt5quicktemplates2.dll',
'qt5quickwidgets.dll',
'qt5script.dll',
'qt5sensors.dll',
'qt5serialport.dll',
'qt5sql.dll',
'qt5svg.dll',
'qt5test.dll',
'qt5webchannel.dll',
'qt5webengine.dll',
'qt5webenginecore.dll',
'qt5webenginewidgets.dll',
'qt5webkit.dll',
'qt5webkitwidgets.dll',
'qt5websockets.dll',
'qt5widgets.dll',
'qt5widgetsd.dll',
'qt5winextras.dll',
'qt5xml.dll',
'qt5xmlpatterns.dll',
'qt53dcore.dll',
'qt53dcored.dll',
'qt53dinput.dll',
'qt53drender.dll',
'qt53drenderd.dll',
'qtcore4.dll',
'qtcored4.dll',
'qtdbus4.dll',
'qtdeclarative4.dll',
'qtgui4.dll',
'qtguid4.dll',
'qtintf70.dll',
'qtnetwork4.dll',
'qtopengl4.dll',
'qtscript4.dll',
'qtsql4.dll',
'qtsupport4.dll',
'qtsvg4.dll',
'qtwebkit4.dll',
'qtxml4.dll',
'qtxmlpatterns4.dll',
'quazip.dll',
'quest3_mtc.dll',
'qwave.dll',
'qwutil.dll',
'r.dll',
'rainmeter.dll',
'rapi.dll',
'rasdlg.dll',
'rblas.dll',
'rbsha.dll',
'rdctcpip.dll',
'rds32.dll',
'react-native-win32.dll',
'reagent.dll',
'records.dll',
'resultpage.dll',
'rlapack.dll',
'rmclient.dll',
'role3d.dll',
'room616_mtc.dll',
'rpcrt4.dll',
'rps32.dll',
'rstrtmgr.dll',
'rtutils.dll',
'rvcore.dll',
'rxffr.dll',
'rxruntim.dll',
'sal3.dll',
'salhelper3msc.dll',
'samcli.dll',
'sandboxbroker.dll',
'saphirdll.dll',
'sbiedll.dll',
'scculib.dll',
'sccut.dll',
'scesrv.dll',
'schedularnet.dll',
'scint.dll',
'scnpst32.dll',
'scnpst64.dll',
'scnpst64c.dll',
'sdl.dll',
'sdl2.dll',
'sdl2_gfx.dll',
'sdl2_image.dll',
'sdl2_mixer.dll',
'sdl2_ttf.dll',
'sdl_image.dll',
'sdl_mixer.dll',
'sdl_ttf.dll',
'secdb.dll',
'secur32.dll',
'security.dll',
'sensapi.dll',
'serialization.dll',
'settings.dll',
'setupapi.dll',
'setupengine.dll',
'sginfra.dll',
'shared.dll',
'sharedlibrary.dll',
'sharedu.dll',
'shcore.dll',
'shiboken2.abi3.dll',
'sibus.dll',
'sicl32.dll',
'siclrpc.dll',
'sicollection.dll',
'sicomm.dll',
'sidatadesc.dll',
'siexception.dll',
'sirow.dll',
'sirulereturn.dll',
'sistruct.dll',
'skywlib.dll',
'smime3.dll',
'smrt32.dll',
'sndvolsso.dll',
'so.5.5.23.dll',
'so.5.5.24.1.dll',
'so.5.5.24.2.dll',
'so.5.5.24.dll',
'softwareupdatefiles.dll',
'sos.dll',
'sotmi.dll',
'spaacis.dll',
'spbasic.dll',
'speedtreert.dll',
'spell32.dll',
'splutilities.dll',
'spoolss.dll',
'spp.dll',
'sql.dll',
'sqlite.dll',
'sqlite3.dll',
'sqmapi.dll',
'srpapi.dll',
'srvcli.dll',
'ssl3.dll',
'ssleay32.dll',
'ssm_api.dll',
'sspicli.dll',
'statsjunkysystem.dll',
'stddll32.dll',
'stddll40.dll',
'steam_api.dll',
'steam_api64.dll',
'sti.dll',
'stlpmt45.dll',
'stlport-w64r-20-1.dll',
'stlport.5.1.dll',
'stlport.5.2.dll',
'stlport_vc7145.dll',
'storm.dll',
'studyrunner.dll',
'sub_ctrl.dll',
'svc.binary.dll',
'svc.filesys.dll',
'svc.node.dll',
'svlmi.dll',
'svml_dispmd.dll',
'svt.dll',
'svtmi.dll',
'swipl.dll',
'swresample-1.dll',
'swresample-2.dll',
'swresample-3.dll',
'swscale-0.dll',
'swscale-2.dll',
'swscale-3.dll',
'swscale-4.dll',
'swscale-5.dll',
'systask.dll',
'system.dll',
'systemeventsbrokerclient.dll',
'systemutilities.dll',
't2embed.dll',
'tabsys.dll',
'taminstance.dll',
'tasks_core.dll',
'tbb.dll',
'tbbmalloc.dll',
'tbs.dll',
'tbstdobjs.dll',
'td_alloc_4.03_14.dll',
'td_alloc_20.6_15.dll',
'td_alloc_20.8_14.dll',
'td_dbcore_20.6_15.dll',
'td_ge_20.6_15.dll',
'td_root.dll',
'td_root_4.03_14.dll',
'td_root_20.8_14.dll',
'tdh.dll',
'tdl100.dll',
'tdl200.dll',
'tdl201.dll',
'tdl210.dll',
'tdl290.dll',
'telemetry.dll',
'ter32.dll',
'texteditor.dll',
'texteditor4.dll',
'textomsg.dll',
'tgp_monitor.dll',
'thrdutil.dll',
'tier0.dll',
'tier0_s.dll',
'tinyxml.dll',
'tkbrep.dll',
'tkernel.dll',
'tkg2d.dll',
'tkg3d.dll',
'tkgeomalgo.dll',
'tkgeombase.dll',
'tkmath.dll',
'tkmi.dll',
'tkshhealing.dll',
'tktopalgo.dll',
'tl.dll',
'tllo.dll',
'tlmi.dll',
'tmfoundation_gt2_release.dll',
'tmgeneral_release.dll',
'tmmetadata_release.dll',
'tmpdlg10.dll',
'tmpublic_algorithmframework_release.dll',
'tmpublic_core_release.dll',
'tmpublic_maskstorage_release.dll',
'tmpublic_math_release.dll',
'tmpublic_serialization_release.dll',
'tmpublic_voxelstorage_release.dll',
'tn3dls.dll',
'tools.dll',
'toom_mtc.dll',
'tp.dll',
'trace.dll',
'tracelog-4-0.dll',
'tracker.dll',
'traffic.dll',
'trayicon2.dll',
'truss100.dll',
'truss200.dll',
'tsduck.dll',
'twinapi.appcore.dll',
'tx_log.dll',
'txinterf.dll',
'txmlutil.dll',
'types16_mtc.dll',
'u.dll',
'u32base.dll',
'u32comm.dll',
'ubs_database.dll',
'ubs_datetime.dll',
'ubs_dbunidesys.dll',
'ubs_error.dll',
'ubs_evlog.dll',
'ubs_objects.dll',
'ubs_trace.dll',
'ubs_varios.dll',
'ucbhelper4msc.dll',
'ucrtbased.dll',
'ue4editor-core.dll',
'ue4editor-coreuobject.dll',
'ue4editor-engine.dll',
'ue4editor-inputcore.dll',
'ue4editor-slate.dll',
'ue4editor-slatecore.dll',
'ue4editor-unrealed.dll',
'ue4editor-unrealenginepython.dll',
'ui-service-provider.dll',
'ui.dll',
'uiautomationcore.dll',
'umpdc.dll',
'umpnpmgr.dll',
'unbcl.dll',
'uniansi.dll',
'unicode.dll',
'units.dll',
'unityplayer.dll',
'unrar.dll',
'url.dll',
'userdata.dll',
'userdatatypehelperutil.dll',
'usevtlog.dll',
'usp10.dll',
'usprfl2d.dll',
'util.dll',
'utildebuglog.dll',
'utilities.dll',
'utility.dll',
'utilmsgbuffer.dll',
'utils.dll',
'utils4.dll',
'utl.dll',
'utlmi.dll',
'uwinapi.dll',
'vaultcli.dll',
'vb40032.dll',
'vboxrt.dll',
'vccorlib140.dll',
'vccorlib140_app.dll',
'vcl.dll',
'vclmi.dll',
'vcomp90.dll',
'vcomp100.dll',
'vcomp120.dll',
'vcomp140.dll',
'vcomp140d.dll',
'vcruntime140.dll',
'vcruntime140_1.dll',
'vcruntime140_app.dll',
'vcruntime140d.dll',
'vcruntime140d_app.dll',
'vctl.dll',
'vdp_rdpvcbridge.dll',
'videoframe.dll',
'virtdisk.dll',
'visitcommon.dll',
'visusdataflow.dll',
'visusdb.dll',
'visusgui.dll',
'visusidx.dll',
'visuskernel.dll',
'vmprotectsdk32.dll',
'vo28gui.dll',
'vo28inet.dll',
'vo28ole.dll',
'vo28run.dll',
'vo28sys.dll',
'vocon3200_asr.dll',
'vocon3200_base.dll',
'vorbisfile.dll',
'vos3msc.dll',
'vproc2.dll',
'vsansi.dll',
'vsstrace.dll',
'vstdlib.dll',
'vstdlib_s.dll',
'vtkcommoncore-8.1.dll',
'vtkcommoncore-pv5.5.dll',
'vtkcommoncore-pv5.7.dll',
'vtkcommondatamodel-8.1.dll',
'vtools2.dll',
'vulkan-1.dll',
'w3btrv7.dll',
'wbtrv32.dll',
'wdscore.dll',
'we60x.dll',
'webres.dll',
'webservices.dll',
'wer.dll',
'wevtapi.dll',
'wfapi.dll',
'wglogin.dll',
'wimgapi.dll',
'win32msgqueue.dll',
'win32u.dll',
'winamp.dll',
'winbrand.dll',
'wincorlib.dll',
'windos95.dll',
'wingenfn.dll',
'winnsi.dll',
'winrtmex.dll',
'winsock.dll',
'winsparkle.dll',
'wintax.dll',
'wintrust.dll',
'winusb.dll',
'wiretap-1.11.0.dll',
'wkscli.dll',
'wlanapi.dll',
'wlanutil.dll',
'wldp.dll',
'wmiclnt.dll',
'wmsgapi.dll',
'wow32.dll',
'wow64.dll',
'wpcap.dll',
'wrapper.dll',
'wrs_module1.dll',
'ws2.dll',
'ws_log.dll',
'wsnmp32.dll',
'wt.dll',
'wthttp.dll',
'wvcore.dll',
'wwutils.dll',
'wxbase30u_gcc_custom.dll',
'wxmsw30u_core_gcc_custom.dll',
'x3daudio1_7.dll',
'x64-msvcrt-ruby240.dll',
'x64-msvcrt-ruby250.dll',
'x64-msvcrt-ruby260.dll',
'x_funms.dll',
'x_pdfms.dll',
'xapofx1_5.dll',
'xbtbase1.dll',
'xbtbase2.dll',
'xcdcpr.dll',
'xcdcprgeometrysources.dll',
'xcdfields.dll',
'xcdimaging.dll',
'xcdmath3d.dll',
'xcdmesh.dll',
'xcdutility.dll',
'xerces-c_2_6.dll',
'xerces-c_2_7.dll',
'xerces-c_3_1.dll',
'xgraphic32.dll',
'ximage.dll',
'xinput1_3.dll',
'xlluaruntime.dll',
'xmllite.dll',
'xpcom_core.dll',
'xppdbgc.dll',
'xppdui.dll',
'xpprt1.dll',
'xppsys.dll',
'xppui2.dll',
'xrcore.dll',
'xtpro.dll',
'xul.dll',
'yaml.dll',
'zip.dll',
'zlib.dll',
'zlib1.dll',
'zlibwapi.dll',
'zoom32.dll',
'ztdata.dll',
'ztframe.dll',
])
WIN7_DLLS = frozenset([
'aaclient.dll',
'accessibilitycpl.dll',
'acctres.dll',
'acledit.dll',
'aclui.dll',
'acppage.dll',
'acproxy.dll',
'actioncenter.dll',
'actioncentercpl.dll',
'actionqueue.dll',
'actxprxy.dll',
'admparse.dll',
'admtmpl.dll',
'adprovider.dll',
'adsldp.dll',
'adsldpc.dll',
'adsmsext.dll',
'adsnt.dll',
'adtschema.dll',
'advapi32.dll',
'advpack.dll',
'aecache.dll',
'aeevts.dll',
'aeinv.dll',
'aelupsvc.dll',
'aepdu.dll',
'alttab.dll',
'amstream.dll',
'amxread.dll',
'apds.dll',
'api-ms-win-core-xstate-l1-1-0.dll',
'apilogen.dll',
'apircl.dll',
'apisetschema.dll',
'apphlpdm.dll',
'appidapi.dll',
'appidpolicyengineapi.dll',
'appidsvc.dll',
'appinfo.dll',
'appmgmts.dll',
'appmgr.dll',
'apss.dll',
'asferror.dll',
'asycfilt.dll',
'atl.dll',
'audiodev.dll',
'audioeng.dll',
'audiokse.dll',
'audioses.dll',
'audiosrv.dll',
'auditcse.dll',
'auditnativesnapin.dll',
'auditpolicygpinterop.dll',
'auditpolmsg.dll',
'authfwcfg.dll',
'authfwgp.dll',
'authfwsnapin.dll',
'authfwwizfwk.dll',
'authui.dll',
'autoplay.dll',
'auxiliarydisplayapi.dll',
'auxiliarydisplayclassinstaller.dll',
'auxiliarydisplaycpl.dll',
'auxiliarydisplaydriverlib.dll',
'auxiliarydisplayservices.dll',
'avicap32.dll',
'avifil32.dll',
'axinstsv.dll',
'azroles.dll',
'azroleui.dll',
'azsqlext.dll',
'basecsp.dll',
'basesrv.dll',
'batmeter.dll',
'batt.dll',
'bcdprov.dll',
'bcdsrv.dll',
'bdehdcfglib.dll',
'bderepair.dll',
'bdesvc.dll',
'bdeui.dll',
'bfe.dll',
'bidispl.dll',
'biocpl.dll',
'biocredprov.dll',
'bitsigd.dll',
'bitsperf.dll',
'bitsprx2.dll',
'bitsprx3.dll',
'bitsprx4.dll',
'bitsprx5.dll',
'bitsprx6.dll',
'blackbox.dll',
'blb_ps.dll',
'blbevents.dll',
'blbres.dll',
'bootres.dll',
'bootstr.dll',
'bootvid.dll',
'brdgcfg.dll',
'bridgeres.dll',
'browcli.dll',
'browser.dll',
'browseui.dll',
'bthci.dll',
'bthmtpcontexthandler.dll',
'bthpanapi.dll',
'bthserv.dll',
'btpanui.dll',
'bwunpairelevated.dll',
'c_g18030.dll',
'c_is2022.dll',
'c_iscii.dll',
'cabview.dll',
'capiprovider.dll',
'capisp.dll',
'cardgames.dll',
'catsrv.dll',
'catsrvps.dll',
'catsrvut.dll',
'cca.dll',
'cdd.dll',
'cdosys.dll',
'certcli.dll',
'certcredprovider.dll',
'certenc.dll',
'certenroll.dll',
'certenrollui.dll',
'certmgr.dll',
'certpoleng.dll',
'certprop.dll',
'cewmdm.dll',
'cfgbkend.dll',
'chkwudrv.dll',
'chsbrkr.dll',
'chtbrkr.dll',
'chxreadingstringime.dll',
'ci.dll',
'cic.dll',
'circoinst.dll',
'clb.dll',
'clbcatq.dll',
'clfsw32.dll',
'cliconfg.dll',
'clusapi.dll',
'cmcfg32.dll',
'cmdial32.dll',
'cmicryptinstall.dll',
'cmifw.dll',
'cmipnpinstall.dll',
'cmlua.dll',
'cmnclim.dll',
'cmpbk32.dll',
'cmstplua.dll',
'cmutil.dll',
'cngaudit.dll',
'cngprovider.dll',
'cnvfat.dll',
'cofiredm.dll',
'colbact.dll',
'colorcnv.dll',
'colorui.dll',
'comcat.dll',
'comctl32.dll',
'comdlg32.dll',
'compstui.dll',
'comrepl.dll',
'comres.dll',
'comsnap.dll',
'comsvcs.dll',
'comuid.dll',
'connect.dll',
'console.dll',
'corpol.dll',
'correngine.dll',
'cpfilters.dll',
'credssp.dll',
'crppresentation.dll',
'crtdll.dll',
'cryptdlg.dll',
'cryptdll.dll',
'cryptext.dll',
'cryptnet.dll',
'cryptsvc.dll',
'cscdll.dll',
'cscmig.dll',
'cscobj.dll',
'cscsvc.dll',
'cscui.dll',
'csrsrv.dll',
'ctl3d32.dll',
'd3d8thk.dll',
'd3d10_1core.dll',
'd3d10core.dll',
'd3d10level9.dll',
'd3d10warp.dll',
'd3dim.dll',
'd3dim700.dll',
'd3dramp.dll',
'dataclen.dll',
'davclnt.dll',
'davhlpr.dll',
'dbnetlib.dll',
'dbnmpntw.dll',
'dciman32.dll',
'ddaclsys.dll',
'ddoiproxy.dll',
'ddores.dll',
'ddrawex.dll',
'defaultlocationcpl.dll',
'defragproxy.dll',
'defragsvc.dll',
'deskadp.dll',
'deskmon.dll',
'deskperf.dll',
'devenum.dll',
'devicecenter.dll',
'devicedisplaystatusmanager.dll',
'devicemetadataparsers.dll',
'devicepairing.dll',
'devicepairingfolder.dll',
'devicepairinghandler.dll',
'devicepairingproxy.dll',
'deviceuxres.dll',
'devrtl.dll',
'dfdts.dll',
'dfscli.dll',
'dfshim.dll',
'dfsshlex.dll',
'dhcpcmonitor.dll',
'dhcpcore.dll',
'dhcpcore6.dll',
'dhcpqec.dll',
'dhcpsapi.dll',
'diagcpl.dll',
'diagperf.dll',
'dimsjob.dll',
'dimsroam.dll',
'diskcopy.dll',
'dispci.dll',
'dispex.dll',
'display.dll',
'dmband.dll',
'dmcompos.dll',
'dmdlgs.dll',
'dmdskmgr.dll',
'dmdskres.dll',
'dmdskres2.dll',
'dmime.dll',
'dmintf.dll',
'dmloader.dll',
'dmocx.dll',
'dmrc.dll',
'dmscript.dll',
'dmstyle.dll',
'dmsynth.dll',
'dmusic.dll',
'dmutil.dll',
'dmvdsitf.dll',
'dnsapi.dll',
'dnscmmc.dll',
'dnsext.dll',
'dnshc.dll',
'dnsrslvr.dll',
'docprop.dll',
'documentperformanceevents.dll',
'dot3api.dll',
'dot3cfg.dll',
'dot3dlg.dll',
'dot3gpclnt.dll',
'dot3gpui.dll',
'dot3hc.dll',
'dot3msm.dll',
'dot3svc.dll',
'dot3ui.dll',
'dpapiprovider.dll',
'dpmodemx.dll',
'dpnaddr.dll',
'dpnathlp.dll',
'dpnet.dll',
'dpnhpast.dll',
'dpnhupnp.dll',
'dpnlobby.dll',
'dps.dll',
'dpwsockx.dll',
'dpx.dll',
'drmmgrtn.dll',
'drmv2clt.dll',
'drprov.dll',
'drt.dll',
'drtprov.dll',
'drttransport.dll',
'ds32gt.dll',
'dsauth.dll',
'dsdmo.dll',
'dshowrdpfilter.dll',
'dskquota.dll',
'dskquoui.dll',
'dsound.dll',
'dsprop.dll',
'dsquery.dll',
'dssec.dll',
'dssenh.dll',
'dsuiext.dll',
'dswave.dll',
'dtsh.dll',
'dwmapi.dll',
'dwmcore.dll',
'dwmredir.dll',
'dwrite.dll',
'dxdiagn.dll',
'dxmasf.dll',
'dxp.dll',
'dxpps.dll',
'dxptaskringtone.dll',
'dxptasksync.dll',
'dxtmsft.dll',
'dxtrans.dll',
'eapp3hst.dll',
'eappgnui.dll',
'eapphost.dll',
'eappprxy.dll',
'eapqec.dll',
'eapsvc.dll',
'efscore.dll',
'efslsaext.dll',
'efssvc.dll',
'ehstorapi.dll',
'ehstorpwdmgr.dll',
'ehstorshell.dll',
'els.dll',
'elscore.dll',
'elslad.dll',
'elstrans.dll',
'encapi.dll',
'encdec.dll',
'encdump.dll',
'energy.dll',
'eqossnap.dll',
'es.dll',
'esent.dll',
'esentprf.dll',
'eventcls.dll',
'explorerframe.dll',
'expsrv.dll',
'f3ahvoas.dll',
'faultrep.dll',
'fdbth.dll',
'fdbthproxy.dll',
'fde.dll',
'fdeploy.dll',
'fdphost.dll',
'fdpnp.dll',
'fdprint.dll',
'fdproxy.dll',
'fdrespub.dll',
'fdssdp.dll',
'fdwcn.dll',
'fdwnet.dll',
'fdwsd.dll',
'feclient.dll',
'feedbacktool.dll',
'filemgmt.dll',
'findnetprinters.dll',
'firewallapi.dll',
'firewallcontrolpanel.dll',
'fltlib.dll',
'fmifs.dll',
'fntcache.dll',
'fontext.dll',
'fphc.dll',
'framebuf.dll',
'framedyn.dll',
'fthsvc.dll',
'fundisc.dll',
'fveapi.dll',
'fveapibase.dll',
'fvecerts.dll',
'fvecpl.dll',
'fverecover.dll',
'fveui.dll',
'fvewiz.dll',
'fwcfg.dll',
'fwpuclnt.dll',
'fwremotesvr.dll',
'fxsapi.dll',
'fxscom.dll',
'fxscomex.dll',
'fxscompose.dll',
'fxscomposeres.dll',
'fxsevent.dll',
'fxsext32.dll',
'fxsmon.dll',
'fxsresm.dll',
'fxsroute.dll',
'fxsst.dll',
'fxst30.dll',
'fxstiff.dll',
'fxsutility.dll',
'fxsxp32.dll',
'gacinstall.dll',
'gameux.dll',
'gameuxlegacygdfs.dll',
'gcdef.dll',
'gdi32.dll',
'getuname.dll',
'glmf32.dll',
'gpapi.dll',
'gpedit.dll',
'gpprefcl.dll',
'gpprnext.dll',
'gpscript.dll',
'gpsvc.dll',
'gptext.dll',
'groupinghc.dll',
'hal.dll',
'halacpi.dll',
'halmacpi.dll',
'hbaapi.dll',
'hcproviders.dll',
'helppaneproxy.dll',
'hgcpl.dll',
'hgprint.dll',
'hhsetup.dll',
'hidserv.dll',
'hnetcfg.dll',
'hnetmon.dll',
'hotplug.dll',
'hotstartuseragent.dll',
'htui.dll',
'ias.dll',
'iasacct.dll',
'iasads.dll',
'iasdatastore.dll',
'iashlpr.dll',
'iasmigplugin.dll',
'iasnap.dll',
'iaspolcy.dll',
'iasrad.dll',
'iasrecst.dll',
'iassam.dll',
'iassdo.dll',
'iassvcs.dll',
'icaapi.dll',
'icardie.dll',
'icardres.dll',
'iccoinstall.dll',
'icfupgd.dll',
'icm32.dll',
'icmui.dll',
'iconcodecservice.dll',
'icsigd.dll',
'idlisten.dll',
'idndl.dll',
'idstore.dll',
'ieakeng.dll',
'ieaksie.dll',
'ieakui.dll',
'ieapfltr.dll',
'iedkcs32.dll',
'iepeers.dll',
'iernonce.dll',
'iesetup.dll',
'iesysprep.dll',
'ifmon.dll',
'ifsutil.dll',
'ifsutilx.dll',
'igddiag.dll',
'ikeext.dll',
'imagehlp.dll',
'imageres.dll',
'imagesp1.dll',
'imapi.dll',
'imapi2.dll',
'imapi2fs.dll',
'imjp10k.dll',
'inetmib1.dll',
'inetpp.dll',
'inetppui.dll',
'inetres.dll',
'infocardapi.dll',
'inked.dll',
'input.dll',
'inseng.dll',
'iologmsg.dll',
'ipbusenum.dll',
'ipbusenumproxy.dll',
'iphlpapi.dll',
'iphlpsvc.dll',
'ipnathlp.dll',
'iprop.dll',
'iprtprio.dll',
'iprtrmgr.dll',
'ipsecsnp.dll',
'ipsecsvc.dll',
'ipsmsnap.dll',
'irclass.dll',
'irmon.dll',
'iscsicpl.dll',
'iscsidsc.dll',
'iscsied.dll',
'iscsiexe.dll',
'iscsilog.dll',
'iscsium.dll',
'iscsiwmi.dll',
'itircl.dll',
'itss.dll',
'itvdata.dll',
'iyuv_32.dll',
'jnwmon.dll',
'jscript.dll',
'jsproxy.dll',
'kbd101.dll',
'kbd101a.dll',
'kbd101b.dll',
'kbd101c.dll',
'kbd103.dll',
'kbd106.dll',
'kbd106n.dll',
'kbda1.dll',
'kbda2.dll',
'kbda3.dll',
'kbdal.dll',
'kbdarme.dll',
'kbdarmw.dll',
'kbdax2.dll',
'kbdaze.dll',
'kbdazel.dll',
'kbdbash.dll',
'kbdbe.dll',
'kbdbene.dll',
'kbdbgph.dll',
'kbdbgph1.dll',
'kbdbhc.dll',
'kbdblr.dll',
'kbdbr.dll',
'kbdbu.dll',
'kbdbulg.dll',
'kbdca.dll',
'kbdcan.dll',
'kbdcr.dll',
'kbdcz.dll',
'kbdcz1.dll',
'kbdcz2.dll',
'kbdda.dll',
'kbddiv1.dll',
'kbddiv2.dll',
'kbddv.dll',
'kbdes.dll',
'kbdest.dll',
'kbdfa.dll',
'kbdfc.dll',
'kbdfi.dll',
'kbdfi1.dll',
'kbdfo.dll',
'kbdfr.dll',
'kbdgae.dll',
'kbdgeo.dll',
'kbdgeoer.dll',
'kbdgeoqw.dll',
'kbdgkl.dll',
'kbdgr.dll',
'kbdgr1.dll',
'kbdgrlnd.dll',
'kbdhau.dll',
'kbdhe.dll',
'kbdhe220.dll',
'kbdhe319.dll',
'kbdheb.dll',
'kbdhela2.dll',
'kbdhela3.dll',
'kbdhept.dll',
'kbdhu.dll',
'kbdhu1.dll',
'kbdibm02.dll',
'kbdibo.dll',
'kbdic.dll',
'kbdinasa.dll',
'kbdinbe1.dll',
'kbdinbe2.dll',
'kbdinben.dll',
'kbdindev.dll',
'kbdinguj.dll',
'kbdinhin.dll',
'kbdinkan.dll',
'kbdinmal.dll',
'kbdinmar.dll',
'kbdinori.dll',
'kbdinpun.dll',
'kbdintam.dll',
'kbdintel.dll',
'kbdinuk2.dll',
'kbdir.dll',
'kbdit.dll',
'kbdit142.dll',
'kbdiulat.dll',
'kbdjpn.dll',
'kbdkaz.dll',
'kbdkhmr.dll',
'kbdkor.dll',
'kbdkyr.dll',
'kbdla.dll',
'kbdlao.dll',
'kbdlk41a.dll',
'kbdlt.dll',
'kbdlt1.dll',
'kbdlt2.dll',
'kbdlv.dll',
'kbdlv1.dll',
'kbdmac.dll',
'kbdmacst.dll',
'kbdmaori.dll',
'kbdmlt47.dll',
'kbdmlt48.dll',
'kbdmon.dll',
'kbdmonmo.dll',
'kbdne.dll',
'kbdnec.dll',
'kbdnec95.dll',
'kbdnecat.dll',
'kbdnecnt.dll',
'kbdnepr.dll',
'kbdno.dll',
'kbdno1.dll',
'kbdnso.dll',
'kbdpash.dll',
'kbdpl.dll',
'kbdpl1.dll',
'kbdpo.dll',
'kbdro.dll',
'kbdropr.dll',
'kbdrost.dll',
'kbdru.dll',
'kbdru1.dll',
'kbdsf.dll',
'kbdsg.dll',
'kbdsl.dll',
'kbdsl1.dll',
'kbdsmsfi.dll',
'kbdsmsno.dll',
'kbdsn1.dll',
'kbdsorex.dll',
'kbdsors1.dll',
'kbdsorst.dll',
'kbdsp.dll',
'kbdsw.dll',
'kbdsw09.dll',
'kbdsyr1.dll',
'kbdsyr2.dll',
'kbdtajik.dll',
'kbdtat.dll',
'kbdth0.dll',
'kbdth1.dll',
'kbdth2.dll',
'kbdth3.dll',
'kbdtiprc.dll',
'kbdtuf.dll',
'kbdtuq.dll',
'kbdturme.dll',
'kbdughr.dll',
'kbdughr1.dll',
'kbduk.dll',
'kbdukx.dll',
'kbdur.dll',
'kbdur1.dll',
'kbdurdu.dll',
'kbdus.dll',
'kbdusa.dll',
'kbdusl.dll',
'kbdusr.dll',
'kbdusx.dll',
'kbduzb.dll',
'kbdvntc.dll',
'kbdwol.dll',
'kbdyak.dll',
'kbdyba.dll',
'kbdycc.dll',
'kbdycl.dll',
'kd1394.dll',
'kdcom.dll',
'kdusb.dll',
'kerberos.dll',
'kernel32.dll',
'kernelbase.dll',
'kernelceip.dll',
'keyiso.dll',
'keymgr.dll',
'kmsvc.dll',
'korwbrkr.dll',
'ksuser.dll',
'l2gpstore.dll',
'l2nacp.dll',
'l2sechc.dll',
'langcleanupsysprepaction.dll',
'laprxy.dll',
'licmgr10.dll',
'listsvc.dll',
'lltdapi.dll',
'lltdres.dll',
'lltdsvc.dll',
'lmhsvc.dll',
'loadperf.dll',
'localsec.dll',
'localspl.dll',
'localui.dll',
'locationapi.dll',
'loghours.dll',
'lpk.dll',
'lpksetupproxyserv.dll',
'lsasrv.dll',
'lsmproxy.dll',
'luainstall.dll',
'lz32.dll',
'magnification.dll',
'mapistub.dll',
'mcewmdrmndbootstrap.dll',
'mciavi32.dll',
'mcicda.dll',
'mciqtz32.dll',
'mciseq.dll',
'mciwave.dll',
'mcmde.dll',
'mcsrchph.dll',
'mctres.dll',
'mcupdate_authenticamd.dll',
'mcupdate_genuineintel.dll',
'mcx2svc.dll',
'mcxdriv.dll',
'mdminst.dll',
'mediametadatahandler.dll',
'memdiag.dll',
'mf3216.dll',
'mfaacenc.dll',
'mfc40.dll',
'mfc40u.dll',
'mfc42.dll',
'mfc42u.dll',
'mfcsubs.dll',
'mfds.dll',
'mfdvdec.dll',
'mferror.dll',
'mfh264enc.dll',
'mfmjpegdec.dll',
'mfplay.dll',
'mfps.dll',
'mfvdsp.dll',
'mfwmaaec.dll',
'microsoft-windows-hal-events.dll',
'microsoft-windows-kernel-power-events.dll',
'microsoft-windows-kernel-processor-power-events.dll',
'midimap.dll',
'migisol.dll',
'miguiresource.dll',
'mimefilt.dll',
'mmcbase.dll',
'mmci.dll',
'mmcico.dll',
'mmcndmgr.dll',
'mmcshext.dll',
'mmcss.dll',
'mmdevapi.dll',
'mmres.dll',
'modemui.dll',
'montr_ci.dll',
'moricons.dll',
'mp3dmod.dll',
'mp4sdecd.dll',
'mp43decd.dll',
'mpg4decd.dll',
'mpr.dll',
'mprapi.dll',
'mprddm.dll',
'mprdim.dll',
'mprmsg.dll',
'mpssvc.dll',
'msaatext.dll',
'msac3enc.dll',
'msacm32.dll',
'msafd.dll',
'msaudite.dll',
'mscandui.dll',
'mscat32.dll',
'msclmd.dll',
'mscms.dll',
'mscorier.dll',
'mscories.dll',
'mscpx32r.dll',
'mscpxl32.dll',
'msctf.dll',
'msctfmonitor.dll',
'msctfp.dll',
'msctfui.dll',
'msdadiag.dll',
'msdelta.dll',
'msdri.dll',
'msdtckrm.dll',
'msdtclog.dll',
'msdtcprx.dll',
'msdtctm.dll',
'msdtcuiu.dll',
'msdtcvsp1res.dll',
'msexch40.dll',
'msexcl40.dll',
'msfeedsbs.dll',
'msftedit.dll',
'mshtmled.dll',
'mshtmler.dll',
'msicofire.dll',
'msidcrl30.dll',
'msident.dll',
'msidle.dll',
'msidntld.dll',
'msieftp.dll',
'msihnd.dll',
'msimg32.dll',
'msimsg.dll',
'msimtf.dll',
'msisip.dll',
'msjet40.dll',
'msjetoledb40.dll',
'msjint40.dll',
'msjter40.dll',
'msjtes40.dll',
'msltus40.dll',
'msmmsp.dll',
'msmpeg2adec.dll',
'msmpeg2enc.dll',
'msmpeg2vdec.dll',
'msnetobj.dll',
'msobjs.dll',
'msoeacct.dll',
'msoert2.dll',
'msorc32r.dll',
'msorcl32.dll',
'mspatcha.dll',
'mspbda.dll',
'mspbdacoinst.dll',
'mspbde40.dll',
'msports.dll',
'msprivs.dll',
'msrahc.dll',
'msrating.dll',
'msrd2x40.dll',
'msrd3x40.dll',
'msrdc.dll',
'msrdpwebaccess.dll',
'msrepl40.dll',
'msrle32.dll',
'msscntrs.dll',
'msscp.dll',
'mssha.dll',
'msshavmsg.dll',
'msshooks.dll',
'mssign32.dll',
'mssip32.dll',
'mssitlb.dll',
'mssph.dll',
'mssphtb.dll',
'mssprxy.dll',
'mssrch.dll',
'mssvp.dll',
'msswch.dll',
'mstask.dll',
'mstext40.dll',
'mstime.dll',
'mstscax.dll',
'msutb.dll',
'msv1_0.dll',
'msvbvm60.dll',
'msvcirt.dll',
'msvcp60.dll',
'msvcr70.dll',
'msvcrt.dll',
'msvcrt40.dll',
'msvfw32.dll',
'msvidc32.dll',
'msvidctl.dll',
'mswdat10.dll',
'mswmdm.dll',
'mswsock.dll',
'mswstr10.dll',
'msxbde40.dll',
'msxml3.dll',
'msxml3r.dll',
'msxml6.dll',
'msxml6r.dll',
'msyuv.dll',
'mtxclu.dll',
'mtxdm.dll',
'mtxex.dll',
'mtxlegih.dll',
'mtxoci.dll',
'muifontsetup.dll',
'muilanguagecleanup.dll',
'mycomput.dll',
'mydocs.dll',
'napcrypt.dll',
'napdsnap.dll',
'naphlpr.dll',
'napinsp.dll',
'napipsec.dll',
'napmontr.dll',
'nativehooks.dll',
'naturallanguage6.dll',
'ncdprop.dll',
'nci.dll',
'ncryptui.dll',
'ncsi.dll',
'nddeapi.dll',
'ndfetw.dll',
'ndfhcdiscovery.dll',
'ndiscapcfg.dll',
'ndishc.dll',
'ndproxystub.dll',
'negoexts.dll',
'netapi32.dll',
'netbios.dll',
'netcenter.dll',
'netcfgx.dll',
'netcorehc.dll',
'netdiagfx.dll',
'netevent.dll',
'netfxperf.dll',
'neth.dll',
'netid.dll',
'netiohlp.dll',
'netjoin.dll',
'netlogon.dll',
'netman.dll',
'netmsg.dll',
'netplwiz.dll',
'netprof.dll',
'netprofm.dll',
'netprojw.dll',
'netshell.dll',
'nettrace.dll',
'networkexplorer.dll',
'networkitemfactory.dll',
'networkmap.dll',
'nlaapi.dll',
'nlahc.dll',
'nlasvc.dll',
'nlhtml.dll',
'nlmgp.dll',
'nlmsprep.dll',
'nlsbres.dll',
'nlsdata0000.dll',
'nlsdata000a.dll',
'nlsdata000c.dll',
'nlsdata0c1a.dll',
'nlsdata000d.dll',
'nlsdata000f.dll',
'nlsdata0001.dll',
'nlsdata001a.dll',
'nlsdata001b.dll',
'nlsdata001d.dll',
'nlsdata0002.dll',
'nlsdata002a.dll',
'nlsdata0003.dll',
'nlsdata003e.dll',
'nlsdata004a.dll',
'nlsdata004b.dll',
'nlsdata004c.dll',
'nlsdata004e.dll',
'nlsdata0007.dll',
'nlsdata0009.dll',
'nlsdata0010.dll',
'nlsdata0011.dll',
'nlsdata0013.dll',
'nlsdata0018.dll',
'nlsdata0019.dll',
'nlsdata0020.dll',
'nlsdata0021.dll',
'nlsdata0022.dll',
'nlsdata0024.dll',
'nlsdata0026.dll',
'nlsdata0027.dll',
'nlsdata0039.dll',
'nlsdata0045.dll',
'nlsdata0046.dll',
'nlsdata0047.dll',
'nlsdata0049.dll',
'nlsdata081a.dll',
'nlsdata0414.dll',
'nlsdata0416.dll',
'nlsdata0816.dll',
'nlsdl.dll',
'nlslexicons000a.dll',
'nlslexicons000c.dll',
'nlslexicons0c1a.dll',
'nlslexicons000d.dll',
'nlslexicons000f.dll',
'nlslexicons0001.dll',
'nlslexicons001a.dll',
'nlslexicons001b.dll',
'nlslexicons001d.dll',
'nlslexicons0002.dll',
'nlslexicons002a.dll',
'nlslexicons0003.dll',
'nlslexicons003e.dll',
'nlslexicons004a.dll',
'nlslexicons004b.dll',
'nlslexicons004c.dll',
'nlslexicons004e.dll',
'nlslexicons0007.dll',
'nlslexicons0009.dll',
'nlslexicons0010.dll',
'nlslexicons0011.dll',
'nlslexicons0013.dll',
'nlslexicons0018.dll',
'nlslexicons0019.dll',
'nlslexicons0020.dll',
'nlslexicons0021.dll',
'nlslexicons0022.dll',
'nlslexicons0024.dll',
'nlslexicons0026.dll',
'nlslexicons0027.dll',
'nlslexicons0039.dll',
'nlslexicons0045.dll',
'nlslexicons0046.dll',
'nlslexicons0047.dll',
'nlslexicons0049.dll',
'nlslexicons081a.dll',
'nlslexicons0414.dll',
'nlslexicons0416.dll',
'nlslexicons0816.dll',
'nlsmodels0011.dll',
'npmproxy.dll',
'nrpsrv.dll',
'nshhttp.dll',
'nshipsec.dll',
'nshwfp.dll',
'nsisvc.dll',
'ntdll.dll',
'ntlanman.dll',
'ntlanui2.dll',
'ntmarta.dll',
'ntprint.dll',
'ntvdmd.dll',
'objsel.dll',
'ocsetapi.dll',
'odbc32gt.dll',
'odbcbcp.dll',
'odbcconf.dll',
'odbccr32.dll',
'odbccu32.dll',
'odbcint.dll',
'odbcji32.dll',
'odbcjt32.dll',
'odbctrac.dll',
'oddbse32.dll',
'odexl32.dll',
'odfox32.dll',
'odpdx32.dll',
'odtext32.dll',
'offfilt.dll',
'ogldrv.dll',
'ole32.dll',
'oleacc.dll',
'oleacchooks.dll',
'oleaccrc.dll',
'oleaut32.dll',
'olecli32.dll',
'oledlg.dll',
'oleprn.dll',
'oleres.dll',
'olesvr32.dll',
'olethk32.dll',
'onex.dll',
'onexui.dll',
'onlineidcpl.dll',
'oobefldr.dll',
'opcservices.dll',
'osbaseln.dll',
'osuninst.dll',
'p2p.dll',
'p2pcollab.dll',
'p2pgraph.dll',
'p2pnetsh.dll',
'p2psvc.dll',
'packager.dll',
'panmap.dll',
'pautoenr.dll',
'pcadm.dll',
'pcaevts.dll',
'pcasvc.dll',
'pcaui.dll',
'pcwutl.dll',
'pdhui.dll',
'peerdist.dll',
'peerdisthttptrans.dll',
'peerdistsh.dll',
'peerdistsvc.dll',
'peerdistwsddiscoprov.dll',
'perfcentercpl.dll',
'perfctrs.dll',
'perfdisk.dll',
'perfnet.dll',
'perfos.dll',
'perfproc.dll',
'perftrack.dll',
'perfts.dll',
'photometadatahandler.dll',
'photowiz.dll',
'pid.dll',
'pidgenx.dll',
'pifmgr.dll',
'pku2u.dll',
'pla.dll',
'playsndsrv.dll',
'pmcsnap.dll',
'pngfilt.dll',
'pnidui.dll',
'pnpsetup.dll',
'pnpts.dll',
'pnpui.dll',
'pnpxassoc.dll',
'pnpxassocprx.dll',
'pnrpauto.dll',
'pnrphc.dll',
'pnrpnsp.dll',
'pnrpsvc.dll',
'polstore.dll',
'portabledeviceapi.dll',
'portabledeviceclassextension.dll',
'portabledeviceconnectapi.dll',
'portabledevicestatus.dll',
'portabledevicesyncprovider.dll',
'portabledevicetypes.dll',
'portabledevicewiacompat.dll',
'portabledevicewmdrm.dll',
'pots.dll',
'powercpl.dll',
'ppcsnap.dll',
'presentationcffrasterizernative_v0300.dll',
'presentationhostproxy.dll',
'presentationnative_v0300.dll',
'prflbmsg.dll',
'printfilterpipelineprxy.dll',
'printisolationproxy.dll',
'printui.dll',
'prncache.dll',
'prnfldr.dll',
'prnntfy.dll',
'procinst.dll',
'profprov.dll',
'profsvc.dll',
'provsvc.dll',
'provthrd.dll',
'psapi.dll',
'psbase.dll',
'pshed.dll',
'psisdecd.dll',
'pstorec.dll',
'pstorsvc.dll',
'puiapi.dll',
'puiobj.dll',
'pwrshplugin.dll',
'qagent.dll',
'qagentrt.dll',
'qasf.dll',
'qcap.dll',
'qcliprov.dll',
'qdv.dll',
'qdvd.dll',
'qedit.dll',
'qedwipes.dll',
'qmgr.dll',
'qmgrprxy.dll',
'qshvhost.dll',
'qsvrmgmt.dll',
'quartz.dll',
'query.dll',
'qutil.dll',
'racengn.dll',
'racpldlg.dll',
'radardt.dll',
'radarrs.dll',
'rasadhlp.dll',
'rasapi32.dll',
'rasauto.dll',
'rascfg.dll',
'raschap.dll',
'rasctrs.dll',
'rasdiag.dll',
'rasgcw.dll',
'rasman.dll',
'rasmans.dll',
'rasmbmgr.dll',
'rasmm.dll',
'rasmontr.dll',
'rasmxs.dll',
'rasplap.dll',
'rasppp.dll',
'rasser.dll',
'rastapi.dll',
'rastls.dll',
'rdpcfgex.dll',
'rdpcore.dll',
'rdpcorekmts.dll',
'rdpd3d.dll',
'rdpdd.dll',
'rdpencdd.dll',
'rdpencom.dll',
'rdpendp.dll',
'rdprefdd.dll',
'rdprefdrvapi.dll',
'rdpwsx.dll',
'reagent.dll',
'recovery.dll',
'regapi.dll',
'regctrl.dll',
'regidle.dll',
'regsvc.dll',
'remotepg.dll',
'resampledmo.dll',
'resutils.dll',
'rgb9rast.dll',
'riched20.dll',
'riched32.dll',
'rnr20.dll',
'rpcdiag.dll',
'rpcepmap.dll',
'rpchttp.dll',
'rpcndfp.dll',
'rpcns4.dll',
'rpcnsh.dll',
'rpcrtremote.dll',
'rpcss.dll',
'rsaenh.dll',
'rshx32.dll',
'rstrtmgr.dll',
'rtffilt.dll',
'rtm.dll',
'samlib.dll',
'sampleres.dll',
'samsrv.dll',
'sas.dll',
'sbe.dll',
'sbeio.dll',
'sberes.dll',
'scansetting.dll',
'scarddlg.dll',
'scardsvr.dll',
'sccls.dll',
'scecli.dll',
'scext.dll',
'schannel.dll',
'schedcli.dll',
'schedsvc.dll',
'scksp.dll',
'scripto.dll',
'scrobj.dll',
'scrptadm.dll',
'scrrun.dll',
'sdautoplay.dll',
'sdcpl.dll',
'sdengin2.dll',
'sdhcinst.dll',
'sdiageng.dll',
'sdiagprv.dll',
'sdiagschd.dll',
'sdohlp.dll',
'sdrsvc.dll',
'sdshext.dll',
'searchfolder.dll',
'sechost.dll',
'seclogon.dll',
'secproc.dll',
'secproc_isv.dll',
'secproc_ssp.dll',
'secproc_ssp_isv.dll',
'sendmail.dll',
'sens.dll',
'sensapi.dll',
'sensorsapi.dll',
'sensorsclassextension.dll',
'sensorscpl.dll',
'sensrsvc.dll',
'serialui.dll',
'serwvdrv.dll',
'sessenv.dll',
'setbcdlocale.dll',
'setupcln.dll',
'setupetw.dll',
'sfc.dll',
'sfc_os.dll',
'shacct.dll',
'sharemediacpl.dll',
'shdocvw.dll',
'shell32.dll',
'shellstyle.dll',
'shfolder.dll',
'shgina.dll',
'shimeng.dll',
'shimgvw.dll',
'shlwapi.dll',
'shpafact.dll',
'shsetup.dll',
'shsvcs.dll',
'shunimpl.dll',
'shwebsvc.dll',
'signdrv.dll',
'sisbkup.dll',
'slc.dll',
'slcext.dll',
'slwga.dll',
'smartcardcredentialprovider.dll',
'smbhelperclass.dll',
'smiengine.dll',
'sndvolsso.dll',
'snmpapi.dll',
'sntsearch.dll',
'softkbd.dll',
'softpub.dll',
'sortserver2003compat.dll',
'sortwindows6compat.dll',
'spbcd.dll',
'spcmsg.dll',
'sperror.dll',
'spfileq.dll',
'spinf.dll',
'spnet.dll',
'spopk.dll',
'sppc.dll',
'sppcc.dll',
'sppcext.dll',
'sppcomapi.dll',
'sppcommdlg.dll',
'sppinst.dll',
'sppnp.dll',
'sppobjs.dll',
'sppuinotify.dll',
'sppwinob.dll',
'sppwmi.dll',
'spwinsat.dll',
'spwizeng.dll',
'spwizimg.dll',
'spwizres.dll',
'spwizui.dll',
'spwmp.dll',
'sqlceoledb30.dll',
'sqlceqp30.dll',
'sqlcese30.dll',
'sqlsrv32.dll',
'sqlunirl.dll',
'sqlwid.dll',
'sqlwoa.dll',
'srchadmin.dll',
'srclient.dll',
'srcore.dll',
'srhelper.dll',
'srpuxnativesnapin.dll',
'srrstr.dll',
'srvsvc.dll',
'srwmi.dll',
'sscore.dll',
'ssdpapi.dll',
'ssdpsrv.dll',
'sspisrv.dll',
'ssshim.dll',
'sstpsvc.dll',
'stclient.dll',
'sti_ci.dll',
'stobject.dll',
'storagecontexthandler.dll',
'storprop.dll',
'streamci.dll',
'structuredquery.dll',
'sud.dll',
'swprv.dll',
'sxproxy.dll',
'sxs.dll',
'sxshared.dll',
'sxssrv.dll',
'sxsstore.dll',
'synccenter.dll',
'synceng.dll',
'synchostps.dll',
'syncinfrastructure.dll',
'syncinfrastructureps.dll',
'syncreg.dll',
'syncui.dll',
'sysclass.dll',
'sysfxui.dll',
'sysmain.dll',
'sysntfy.dll',
'sysprepmce.dll',
'syssetup.dll',
'systemcpl.dll',
'tabbtn.dll',
'tabbtnex.dll',
'tabsvc.dll',
'tapi3.dll',
'tapi32.dll',
'tapilua.dll',
'tapimigplugin.dll',
'tapiperf.dll',
'tapisrv.dll',
'tapisysprep.dll',
'tapiui.dll',
'taskbarcpl.dll',
'taskcomp.dll',
'taskschd.dll',
'taskschdps.dll',
'tbssvc.dll',
'tcpipcfg.dll',
'tcpmib.dll',
'tcpmon.dll',
'tcpmonui.dll',
'termmgr.dll',
'termsrv.dll',
'thawbrkr.dll',
'themecpl.dll',
'themeservice.dll',
'themeui.dll',
'thumbcache.dll',
'timedatemuicallback.dll',
'tlscsp.dll',
'tpmcompc.dll',
'tquery.dll',
'trapi.dll',
'trkwks.dll',
'tsbyuv.dll',
'tscfgwmi.dll',
'tschannel.dll',
'tsddd.dll',
'tserrredir.dll',
'tsgqec.dll',
'tsmf.dll',
'tspkg.dll',
'tspnprdrcoinstaller.dll',
'tspubwmi.dll',
'tsworkspace.dll',
'tvratings.dll',
'twext.dll',
'txflog.dll',
'txfw32.dll',
'tzres.dll',
'ubpm.dll',
'ucmhc.dll',
'udhisapi.dll',
'udwm.dll',
'uexfat.dll',
'ufat.dll',
'uianimation.dll',
'uiautomationcore.dll',
'uicom.dll',
'uihub.dll',
'uiribbon.dll',
'uiribbonres.dll',
'ulib.dll',
'umb.dll',
'umdmxfrm.dll',
'umpo.dll',
'umrdp.dll',
'unattend.dll',
'unimdmat.dll',
'uniplat.dll',
'untfs.dll',
'upnp.dll',
'upnphost.dll',
'ureg.dll',
'urlmon.dll',
'usbceip.dll',
'usbmon.dll',
'usbperf.dll',
'usbui.dll',
'user32.dll',
'useraccountcontrolsettings.dll',
'usercpl.dll',
'userenv.dll',
'utildll.dll',
'uudf.dll',
'uxinit.dll',
'uxlib.dll',
'uxlibres.dll',
'uxsms.dll',
'uxtheme.dll',
'van.dll',
'vault.dll',
'vaultcredprovider.dll',
'vaultsvc.dll',
'vbajet32.dll',
'vboxoglarrayspu.dll',
'vboxoglcrutil.dll',
'vboxoglerrorspu.dll',
'vboxoglfeedbackspu.dll',
'vboxoglpackspu.dll',
'vboxoglpassthroughspu.dll',
'vbscript.dll',
'vdmdbg.dll',
'vds_ps.dll',
'vdsbas.dll',
'vdsdyn.dll',
'vdsutil.dll',
'vdsvd.dll',
'verifier.dll',
'version.dll',
'vfpodbc.dll',
'vfwwdm32.dll',
'vga.dll',
'vga64k.dll',
'vga256.dll',
'vidreszr.dll',
'vmbuscoinstaller.dll',
'vmbuspipe.dll',
'vmbusres.dll',
'vmdcoinstall.dll',
'vmicres.dll',
'vmictimeprovider.dll',
'vmstorfltres.dll',
'vpnike.dll',
'vpnikeapi.dll',
'vss_ps.dll',
'vssapi.dll',
'w32time.dll',
'w32topl.dll',
'wabsyncprovider.dll',
'wavdest.dll',
'wavemsp.dll',
'wbemcomn.dll',
'wbiosrvc.dll',
'wcnapi.dll',
'wcncsvc.dll',
'wcneapauthproxy.dll',
'wcneappeerproxy.dll',
'wcnnetsh.dll',
'wcnwiz.dll',
'wcspluginservice.dll',
'wdc.dll',
'wdi.dll',
'wdiasqmmodule.dll',
'wdigest.dll',
'webcheck.dll',
'webclnt.dll',
'webio.dll',
'wecapi.dll',
'wecsvc.dll',
'werconcpl.dll',
'wercplsupport.dll',
'werdiagcontroller.dll',
'wersvc.dll',
'werui.dll',
'wevtfwd.dll',
'wevtsvc.dll',
'wfapigp.dll',
'wfhc.dll',
'wfsr.dll',
'whealogr.dll',
'whhelper.dll',
'wiaaut.dll',
'wiadefui.dll',
'wiadss.dll',
'wiarpc.dll',
'wiascanprofiles.dll',
'wiaservc.dll',
'wiashext.dll',
'wiatrace.dll',
'wiavideo.dll',
'win32spl.dll',
'winbio.dll',
'wincredprovider.dll',
'windowscodecs.dll',
'windowscodecsext.dll',
'winethc.dll',
'winfax.dll',
'winhttp.dll',
'wininet - copy.dll',
'wininet.dll',
'winipsec.dll',
'winmm.dll',
'winrnr.dll',
'winrscmd.dll',
'winrsmgr.dll',
'winrssrv.dll',
'winsatapi.dll',
'winscard.dll',
'winshfhc.dll',
'winsockhc.dll',
'winsrpc.dll',
'winsrv.dll',
'winsta.dll',
'winsync.dll',
'winsyncmetastore.dll',
'winsyncproviders.dll',
'wksprtps.dll',
'wkssvc.dll',
'wlancfg.dll',
'wlanconn.dll',
'wlandlg.dll',
'wlangpui.dll',
'wlanhc.dll',
'wlanhlp.dll',
'wlaninst.dll',
'wlanmm.dll',
'wlanmsm.dll',
'wlanpref.dll',
'wlansec.dll',
'wlansvc.dll',
'wlanui.dll',
'wldap32.dll',
'wlgpclnt.dll',
'wls0wndh.dll',
'wmadmod.dll',
'wmadmoe.dll',
'wmalfxgfxdsp.dll',
'wmasf.dll',
'wmcodecdspps.dll',
'wmdmlog.dll',
'wmdmps.dll',
'wmdrmdev.dll',
'wmdrmnet.dll',
'wmdrmsdk.dll',
'wmerror.dll',
'wmi.dll',
'wmicmiplugin.dll',
'wmidx.dll',
'wmiprop.dll',
'wmnetmgr.dll',
'wmp.dll',
'wmpcm.dll',
'wmpdui.dll',
'wmpdxm.dll',
'wmpeffects.dll',
'wmpencen.dll',
'wmphoto.dll',
'wmploc.dll',
'wmpmde.dll',
'wmpps.dll',
'wmpshell.dll',
'wmpsrcwp.dll',
'wmspdmod.dll',
'wmspdmoe.dll',
'wmvcore.dll',
'wmvdecod.dll',
'wmvdspa.dll',
'wmvencod.dll',
'wmvsdecd.dll',
'wmvsencd.dll',
'wmvxencd.dll',
'wpc.dll',
'wpcao.dll',
'wpccpl.dll',
'wpcmig.dll',
'wpcsvc.dll',
'wpcumi.dll',
'wpd_ci.dll',
'wpdbusenum.dll',
'wpdshext.dll',
'wpdshserviceobj.dll',
'wpdsp.dll',
'wpdwcn.dll',
'ws2_32.dll',
'ws2help.dll',
'wscapi.dll',
'wscinterop.dll',
'wscisvif.dll',
'wscmisetup.dll',
'wscproxystub.dll',
'wscsvc.dll',
'wsdapi.dll',
'wsdchngr.dll',
'wsdmon.dll',
'wsdprintproxy.dll',
'wsdscanproxy.dll',
'wsecedit.dll',
'wsepno.dll',
'wshbth.dll',
'wshcon.dll',
'wshelper.dll',
'wshext.dll',
'wship6.dll',
'wshirda.dll',
'wshnetbs.dll',
'wshqos.dll',
'wshrm.dll',
'wshtcpip.dll',
'wsmanmigrationplugin.dll',
'wsmauto.dll',
'wsmplpxy.dll',
'wsmres.dll',
'wsmsvc.dll',
'wsmwmipl.dll',
'wsock32.dll',
'wtsapi32.dll',
'wuapi.dll',
'wuaueng.dll',
'wucltux.dll',
'wudfcoinstaller.dll',
'wudfplatform.dll',
'wudfsvc.dll',
'wudfx.dll',
'wudriver.dll',
'wups.dll',
'wups2.dll',
'wuwebv.dll',
'wvc.dll',
'wwanadvui.dll',
'wwanapi.dll',
'wwancfg.dll',
'wwanconn.dll',
'wwanhc.dll',
'wwaninst.dll',
'wwanmm.dll',
'wwanpref.dll',
'wwanprotdim.dll',
'wwansvc.dll',
'wwapi.dll',
'wzcdlg.dll',
'xinput9_1_0.dll',
'xmlfilter.dll',
'xmlprovi.dll',
'xolehlp.dll',
'xpsfilt.dll',
'xpsgdiconverter.dll',
'xpsprint.dll',
'xpsrasterservice.dll',
'xpsservices.dll',
'xpsshhdr.dll',
'xpssvcs.dll',
'xwizards.dll',
'xwreg.dll',
'xwtpdui.dll',
'xwtpw32.dll',
'zgmprxy.dll',
'zipfldr.dll',
])
# Lowercase DLL file names kept as an immutable set for O(1) membership tests.
# Named WIN8_DLLS and sitting between sibling version-specific sets (another
# DLL frozenset closes just above; WIN10_DLLS follows below), so presumably
# these are system DLLs first shipped with Windows 8 — TODO confirm against
# whatever tool/listing generated this table. Do not hand-edit entries.
WIN8_DLLS = frozenset([
    'adhapi.dll',
    'adhsvc.dll',
    'adrclient.dll',
    'api-ms-win-appmodel-identity-l1-1-0.dll',
    'api-ms-win-appmodel-state-l1-1-0.dll',
    'api-ms-win-base-bootconfig-l1-1-0.dll',
    'api-ms-win-base-util-l1-1-0.dll',
    'api-ms-win-core-appcompat-l1-1-0.dll',
    'api-ms-win-core-appinit-l1-1-0.dll',
    'api-ms-win-core-bem-l1-1-0.dll',
    'api-ms-win-core-bicltapi-l1-1-0.dll',
    'api-ms-win-core-biplmapi-l1-1-0.dll',
    'api-ms-win-core-biptcltapi-l1-1-0.dll',
    'api-ms-win-core-comm-l1-1-0.dll',
    'api-ms-win-core-fibers-l2-1-0.dll',
    'api-ms-win-core-firmware-l1-1-0.dll',
    'api-ms-win-core-localization-obsolete-l1-1-0.dll',
    'api-ms-win-core-multipleproviderrouter-l1-1-0.dll',
    'api-ms-win-core-processsecurity-l1-1-0.dll',
    'api-ms-win-core-processtopology-l1-1-0.dll',
    'api-ms-win-core-psapi-obsolete-l1-1-0.dll',
    'api-ms-win-core-psm-info-l1-1-0.dll',
    'api-ms-win-core-psm-plm-l1-1-0.dll',
    'api-ms-win-core-registry-private-l1-1-0.dll',
    'api-ms-win-core-stringloader-l1-1-0.dll',
    'api-ms-win-core-timezone-private-l1-1-0.dll',
    'api-ms-win-core-version-private-l1-1-0.dll',
    'api-ms-win-core-versionansi-l1-1-0.dll',
    'api-ms-win-core-winrt-errorprivate-l1-1-0.dll',
    'api-ms-win-core-winrt-propertysetprivate-l1-1-0.dll',
    'api-ms-win-core-winrt-registration-l1-1-0.dll',
    'api-ms-win-core-winrt-roparameterizediid-l1-1-0.dll',
    'api-ms-win-core-xstate-l1-1-1.dll',
    'api-ms-win-devices-config-l1-1-0.dll',
    'api-ms-win-devices-swdevice-l1-1-0.dll',
    'api-ms-win-eventing-obsolete-l1-1-0.dll',
    'api-ms-win-eventlog-private-l1-1-0.dll',
    'api-ms-win-gdi-ie-rgn-l1-1-0.dll',
    'api-ms-win-http-time-l1-1-0.dll',
    'api-ms-win-input-ie-interactioncontext-l1-1-0.dll',
    'api-ms-win-mm-joystick-l1-1-0.dll',
    'api-ms-win-mm-mci-l1-1-0.dll',
    'api-ms-win-mm-misc-l1-1-0.dll',
    'api-ms-win-mm-misc-l2-1-0.dll',
    'api-ms-win-mm-mme-l1-1-0.dll',
    'api-ms-win-mm-playsound-l1-1-0.dll',
    'api-ms-win-net-isolation-l1-1-0.dll',
    'api-ms-win-ntuser-dc-access-l1-1-0.dll',
    'api-ms-win-ntuser-ie-clipboard-l1-1-0.dll',
    'api-ms-win-ntuser-ie-message-l1-1-0.dll',
    'api-ms-win-ntuser-ie-window-l1-1-0.dll',
    'api-ms-win-ntuser-ie-wmpointer-l1-1-0.dll',
    'api-ms-win-ntuser-uicontext-l1-1-0.dll',
    'api-ms-win-ro-typeresolution-l1-1-0.dll',
    'api-ms-win-security-appcontainer-l1-1-0.dll',
    'api-ms-win-security-audit-l1-1-0.dll',
    'api-ms-win-security-base-private-l1-1-0.dll',
    'api-ms-win-security-sddl-ansi-l1-1-0.dll',
    'api-ms-win-security-sddlparsecond-l1-1-0.dll',
    'api-ms-win-shlwapi-ie-l1-1-0.dll',
    'apprepapi.dll',
    'apprepsync.dll',
    'appsruprov.dll',
    'appxalluserstore.dll',
    'appxdeploymentclient.dll',
    'appxdeploymentextensions.dll',
    'appxdeploymentserver.dll',
    'appxpackaging.dll',
    'appxsip.dll',
    'appxstreamingdatasourceps.dll',
    'appxsysprep.dll',
    'aspnet_counters.dll',
    'audioendpointbuilder.dll',
    'auinstallagent.dll',
    'authbroker.dll',
    'authext.dll',
    'authhostproxy.dll',
    'bcp47langs.dll',
    'bi.dll',
    'bisrv.dll',
    'bitsprx7.dll',
    'biwinrt.dll',
    'bluetoothapis.dll',
    'bootmenuux.dll',
    'bootux.dll',
    'bthhfsrv.dll',
    'bthradiomedia.dll',
    'bthsqm.dll',
    'callbuttons.dll',
    'callbuttons.proxystub.dll',
    'certca.dll',
    'cfmifs.dll',
    'cfmifsproxy.dll',
    'chartv.dll',
    'clrhost.dll',
    'cmdext.dll',
    'cngcredui.dll',
    'connectedaccountstate.dll',
    'consentux.dll',
    'cryptcatsvc.dll',
    'cryptowinrt.dll',
    'cryptuiwizard.dll',
    'daconn.dll',
    'dafbth.dll',
    'dafprintprovider.dll',
    'dafupnp.dll',
    'dafwcn.dll',
    'dafwfdprovider.dll',
    'dafwsd.dll',
    'damm.dll',
    'daotpcredentialprovider.dll',
    'das.dll',
    'datusage.dll',
    'ddp_ps.dll',
    'ddpchunk.dll',
    'ddptrace.dll',
    'ddputils.dll',
    'defaultdevicemanager.dll',
    'defaultprinterprovider.dll',
    'delegatorprovider.dll',
    'devdispitemprovider.dll',
    'deviceaccess.dll',
    'devicedriverretrievalclient.dll',
    'deviceelementsource.dll',
    'devicemetadataretrievalclient.dll',
    'devicesetupmanager.dll',
    'devicesetupmanagerapi.dll',
    'devicesetupstatusprovider.dll',
    'devinv.dll',
    'devpropmgr.dll',
    'discan.dll',
    'dismapi.dll',
    'dlnashext.dll',
    'dmvscres.dll',
    'dot3mm.dll',
    'dpapi.dll',
    'dpapisrv.dll',
    'dsui.dll',
    'dxgwdi.dll',
    'easconsent.dll',
    'easinvoker.proxystub.dll',
    'easwrt.dll',
    'elshyph.dll',
    'energyprov.dll',
    'energytask.dll',
    'ext-ms-win-advapi32-auth-l1-1-0.dll',
    'ext-ms-win-advapi32-encryptedfile-l1-1-0.dll',
    'ext-ms-win-advapi32-eventingcontroller-l1-1-0.dll',
    'ext-ms-win-advapi32-eventlog-l1-1-0.dll',
    'ext-ms-win-advapi32-lsa-l1-1-0.dll',
    'ext-ms-win-advapi32-msi-l1-1-0.dll',
    'ext-ms-win-advapi32-ntmarta-l1-1-0.dll',
    'ext-ms-win-advapi32-psm-app-l1-1-0.dll',
    'ext-ms-win-advapi32-registry-l1-1-0.dll',
    'ext-ms-win-advapi32-safer-l1-1-0.dll',
    'ext-ms-win-advapi32-shutdown-l1-1-0.dll',
    'ext-ms-win-authz-claimpolicies-l1-1-0.dll',
    'ext-ms-win-authz-context-l1-1-0.dll',
    'ext-ms-win-authz-remote-l1-1-0.dll',
    'ext-ms-win-biometrics-winbio-l1-1-0.dll',
    'ext-ms-win-bluetooth-deviceassociation-l1-1-0.dll',
    'ext-ms-win-branding-winbrand-l1-1-0.dll',
    'ext-ms-win-cluster-clusapi-l1-1-0.dll',
    'ext-ms-win-cluster-resutils-l1-1-0.dll',
    'ext-ms-win-cmd-util-l1-1-0.dll',
    'ext-ms-win-cng-rng-l1-1-0.dll',
    'ext-ms-win-com-clbcatq-l1-1-0.dll',
    'ext-ms-win-com-psmregister-l1-1-0.dll',
    'ext-ms-win-domainjoin-netjoin-l1-1-0.dll',
    'ext-ms-win-firewallapi-webproxy-l1-1-0.dll',
    'ext-ms-win-fs-clfs-l1-1-0.dll',
    'ext-ms-win-fsutilext-ifsutil-l1-1-0.dll',
    'ext-ms-win-fsutilext-ulib-l1-1-0.dll',
    'ext-ms-win-gdi-dc-l1-1-0.dll',
    'ext-ms-win-gdi-devcaps-l1-1-0.dll',
    'ext-ms-win-gdi-metafile-l1-1-0.dll',
    'ext-ms-win-gdi-path-l1-1-0.dll',
    'ext-ms-win-gdi-render-l1-1-0.dll',
    'ext-ms-win-gdi-rgn-l1-1-0.dll',
    'ext-ms-win-gdi-wcs-l1-1-0.dll',
    'ext-ms-win-gpapi-grouppolicy-l1-1-0.dll',
    'ext-ms-win-gui-uxinit-l1-1-0.dll',
    'ext-ms-win-kernel32-appcompat-l1-1-0.dll',
    'ext-ms-win-kernel32-datetime-l1-1-0.dll',
    'ext-ms-win-kernel32-errorhandling-l1-1-0.dll',
    'ext-ms-win-kernel32-file-l1-1-0.dll',
    'ext-ms-win-kernel32-package-current-l1-1-0.dll',
    'ext-ms-win-kernel32-registry-l1-1-0.dll',
    'ext-ms-win-kernel32-sidebyside-l1-1-0.dll',
    'ext-ms-win-kernel32-transacted-l1-1-0.dll',
    'ext-ms-win-kernel32-windowserrorreporting-l1-1-0.dll',
    'ext-ms-win-kernelbase-processthread-l1-1-0.dll',
    'ext-ms-win-mf-winmm-l1-1-0.dll',
    'ext-ms-win-mm-msacm-l1-1-0.dll',
    'ext-ms-win-mm-pehelper-l1-1-0.dll',
    'ext-ms-win-mm-wmdrmsdk-l1-1-0.dll',
    'ext-ms-win-mpr-multipleproviderrouter-l1-1-0.dll',
    'ext-ms-win-mrmcorer-resmanager-l1-1-0.dll',
    'ext-ms-win-msiltcfg-msi-l1-1-0.dll',
    'ext-ms-win-networking-winipsec-l1-1-0.dll',
    'ext-ms-win-newdev-config-l1-1-0.dll',
    'ext-ms-win-ntdsa-activedirectoryserver-l1-1-0.dll',
    'ext-ms-win-ntdsapi-activedirectoryclient-l1-1-0.dll',
    'ext-ms-win-ntos-ksecurity-l1-1-0.dll',
    'ext-ms-win-ntos-ksigningpolicy-l1-1-0.dll',
    'ext-ms-win-ntos-tm-l1-1-0.dll',
    'ext-ms-win-ntuser-caret-l1-1-0.dll',
    'ext-ms-win-ntuser-dc-access-ext-l1-1-0.dll',
    'ext-ms-win-ntuser-menu-l1-1-0.dll',
    'ext-ms-win-ntuser-mouse-l1-1-0.dll',
    'ext-ms-win-ntuser-powermanagement-l1-1-0.dll',
    'ext-ms-win-ntuser-private-l1-1-0.dll',
    'ext-ms-win-ntuser-string-l1-1-0.dll',
    'ext-ms-win-ntuser-sysparams-ext-l1-1-0.dll',
    'ext-ms-win-ole32-clipboard-ie-l1-1-0.dll',
    'ext-ms-win-ole32-ie-ext-l1-1-0.dll',
    'ext-ms-win-ole32-oleautomation-l1-1-0.dll',
    'ext-ms-win-printer-winspool-l1-1-0.dll',
    'ext-ms-win-profile-profsvc-l1-1-0.dll',
    'ext-ms-win-profile-userenv-l1-1-0.dll',
    'ext-ms-win-ras-rasapi32-l1-1-0.dll',
    'ext-ms-win-ras-rasdlg-l1-1-0.dll',
    'ext-ms-win-ras-rasman-l1-1-0.dll',
    'ext-ms-win-ras-tapi32-l1-1-0.dll',
    'ext-ms-win-rometadata-dispenser-l1-1-0.dll',
    'ext-ms-win-samsrv-accountstore-l1-1-0.dll',
    'ext-ms-win-scesrv-server-l1-1-0.dll',
    'ext-ms-win-secur32-translatename-l1-1-0.dll',
    'ext-ms-win-security-cryptui-l1-1-0.dll',
    'ext-ms-win-security-kerberos-l1-1-0.dll',
    'ext-ms-win-security-vaultcli-l1-1-0.dll',
    'ext-ms-win-session-userinit-l1-1-0.dll',
    'ext-ms-win-session-wininit-l1-1-0.dll',
    'ext-ms-win-setupapi-cfgmgr32local-l1-1-0.dll',
    'ext-ms-win-setupapi-cfgmgr32remote-l1-1-0.dll',
    'ext-ms-win-setupapi-classinstallers-l1-1-0.dll',
    'ext-ms-win-setupapi-inf-l1-1-0.dll',
    'ext-ms-win-setupapi-logging-l1-1-0.dll',
    'ext-ms-win-shell-propsys-l1-1-0.dll',
    'ext-ms-win-shell-shell32-l1-1-0.dll',
    'ext-ms-win-shell-shlwapi-l1-1-0.dll',
    'ext-ms-win-shell32-shellcom-l1-1-0.dll',
    'ext-ms-win-smbshare-sscore-l1-1-0.dll',
    'ext-ms-win-spinf-inf-l1-1-0.dll',
    'ext-ms-win-sxs-oleautomation-l1-1-0.dll',
    'ext-ms-win-umpoext-umpo-l1-1-0.dll',
    'ext-ms-win-webio-pal-l1-1-0.dll',
    'ext-ms-win-winhttp-pal-l1-1-0.dll',
    'ext-ms-win-wininet-pal-l1-1-0.dll',
    'ext-ms-win-wlan-grouppolicy-l1-1-0.dll',
    'ext-ms-win-wlan-onexui-l1-1-0.dll',
    'ext-ms-win-wlan-scard-l1-1-0.dll',
    'ext-ms-win-wsclient-devlicense-l1-1-0.dll',
    'ext-ms-win-wwan-wwapi-l1-1-0.dll',
    'fddevquery.dll',
    'fhautoplay.dll',
    'fhcat.dll',
    'fhcfg.dll',
    'fhcleanup.dll',
    'fhcpl.dll',
    'fhengine.dll',
    'fhevents.dll',
    'fhlisten.dll',
    'fhshl.dll',
    'fhsrchapi.dll',
    'fhsrchph.dll',
    'fhsvc.dll',
    'fhsvcctl.dll',
    'fhtask.dll',
    'fhuxadapter.dll',
    'fhuxapi.dll',
    'fhuxcommon.dll',
    'fhuxgraphics.dll',
    'fhuxpresentation.dll',
    'fileappxstreamingdatasource.dll',
    'fmapi.dll',
    'fms.dll',
    'frprov.dll',
    'fsutilext.dll',
    'fveskybackup.dll',
    'gdiplus.dll',
    'genuinecenter.dll',
    'glcndfilter.dll',
    'halextintclpiodma.dll',
    'halextintcuartdma.dll',
    'hotspotauth.dll',
    'httpprxm.dll',
    'httpprxp.dll',
    'icsvc.dll',
    'idctrls.dll',
    'ieadvpack.dll',
    'inputswitch.dll',
    'ir32_32.dll',
    'ir41_qc.dll',
    'ir41_qcx.dll',
    'ir50_32.dll',
    'ir50_qc.dll',
    'ir50_qcx.dll',
    'iscsiwmiv2.dll',
    'iuilp.dll',
    'jscript9.dll',
    'kbdarmph.dll',
    'kbdarmty.dll',
    'kbdcher.dll',
    'kbdcherp.dll',
    'kbdfar.dll',
    'kbdgeome.dll',
    'kbdgeooa.dll',
    'kbdhaw.dll',
    'kbdhebl3.dll',
    'kbdinen.dll',
    'kbdkni.dll',
    'kbdkurd.dll',
    'kbdlisub.dll',
    'kbdlisus.dll',
    'kbdmyan.dll',
    'kbdnko.dll',
    'kbdntl.dll',
    'kbdogham.dll',
    'kbdphags.dll',
    'kbdrum.dll',
    'kbdtaile.dll',
    'kbdtifi.dll',
    'kbdtifi2.dll',
    'kd.dll',
    'kd_02_10ec.dll',
    'kd_02_14e4.dll',
    'kd_02_8086.dll',
    'kdhv1394.dll',
    'kdnet.dll',
    'kdscli.dll',
    'kdstub.dll',
    'kdvm.dll',
    'keepaliveprovider.dll',
    'livessp.dll',
    'lldpnotify.dll',
    'lscshostpolicy.dll',
    'lsm.dll',
    'maintenanceui.dll',
    'mbaeapi.dll',
    'mbaeapipublic.dll',
    'mbaexmlparser.dll',
    'mbsmsapi.dll',
    'mbussdapi.dll',
    'memorydiagnostic.dll',
    'mfasfsrcsnk.dll',
    'mfcaptureengine.dll',
    'mfcore.dll',
    'mfmediaengine.dll',
    'mfmp4srcsnk.dll',
    'mfmpeg2srcsnk.dll',
    'mfnetcore.dll',
    'mfnetsrc.dll',
    'mfsrcsnk.dll',
    'mfsvr.dll',
    'mftranscode.dll',
    'microsoft-windows-battery-events.dll',
    'microsoft-windows-kernel-pnp-events.dll',
    'microsoft-windows-pdc.dll',
    'microsoft-windows-processor-aggregator-events.dll',
    'migflt.dll',
    'miutils.dll',
    'mprext.dll',
    'mrmcorer.dll',
    'mrmindexer.dll',
    'msauddecmft.dll',
    'msched.dll',
    'msidcrl40.dll',
    'msiwer.dll',
    'mskeyprotcli.dll',
    'mskeyprotect.dll',
    'mspatchc.dll',
    'msspellcheckingfacility.dll',
    'mstextprediction.dll',
    'msvcp110_clr0400.dll',
    'msvcr100_clr0400.dll',
    'msvcr110_clr0400.dll',
    'msvideodsp.dll',
    'msvproc.dll',
    'ncaapi.dll',
    'ncasvc.dll',
    'ncbservice.dll',
    'ncdautosetup.dll',
    'ncryptprov.dll',
    'ncryptsslp.dll',
    'ndisimplatform.dll',
    'nduprov.dll',
    'netprofmsvc.dll',
    'netprovisionsp.dll',
    'networkstatus.dll',
    'nlmproxy.dll',
    'ntasn1.dll',
    'ntvdmcpl.dll',
    'oemlicense.dll',
    'osksupport.dll',
    'packagestateroaming.dll',
    'pcacli.dll',
    'pcpksp.dll',
    'pcptpm12.dll',
    'peerdistad.dll',
    'peerdistcleaner.dll',
    'playlistfolder.dll',
    'playtomanager.dll',
    'playtostatusprovider.dll',
    'pnppolicy.dll',
    'printdialogs.dll',
    'profext.dll',
    'profsvcext.dll',
    'provcore.dll',
    'proximitycommon.dll',
    'proximityservice.dll',
    'prvdmofcomp.dll',
    'psmodulediscoveryprovider.dll',
    'psmsrv.dll',
    'pstask.dll',
    'purchasewindowslicense.dll',
    'pwlauncher.dll',
    'pwsso.dll',
    'rdpcorets.dll',
    'rdpudd.dll',
    'rdsappxhelper.dll',
    'rdsdwmdr.dll',
    'rdvvmtransport.dll',
    'removedevicecontexthandler.dll',
    'removedeviceelevated.dll',
    'reseteng.dll',
    'resetengmig.dll',
    'rfxvmt.dll',
    'rmapi.dll',
    'roamingsecurity.dll',
    'rometadata.dll',
    'rotmgr.dll',
    'scavengeui.dll',
    'sensorperformanceevents.dll',
    'setnetworklocation.dll',
    'settingmonitor.dll',
    'settingsync.dll',
    'settingsyncinfo.dll',
    'shcore.dll',
    'simauth.dll',
    'simcfg.dll',
    'smartcardsimulator.dll',
    'smbwmiv2.dll',
    'smsdeviceaccessrevocation.dll',
    'smspace.dll',
    'smsrouter.dll',
    'sortwindows61.dll',
    'spacecontrol.dll',
    'spmpm.dll',
    'sqlcecompact40.dll',
    'sqlceoledb40.dll',
    'sqlceqp40.dll',
    'sqlcese40.dll',
    'srevents.dll',
    'srh.dll',
    'srm.dll',
    'srm_ps.dll',
    'srmclient.dll',
    'srmlib.dll',
    'srmscan.dll',
    'srmshell.dll',
    'srmstormod.dll',
    'srmtrace.dll',
    'srumapi.dll',
    'srumsvc.dll',
    'sscoreext.dll',
    'startupscan.dll',
    'storagewmi.dll',
    'storagewmi_passthru.dll',
    'storewuauth.dll',
    'storsvc.dll',
    'subscriptionmgr.dll',
    'svsvc.dll',
    'systemeventsbrokerclient.dll',
    'systemeventsbrokerserver.dll',
    'threadpoolwinrt.dll',
    'timebrokerclient.dll',
    'timebrokerserver.dll',
    'timesynctask.dll',
    'tpmtasks.dll',
    'tpmvsc.dll',
    'tssrvlic.dll',
    'tsusbgdcoinstaller.dll',
    'tsusbredirectiongrouppolicyextension.dll',
    'ttlsauth.dll',
    'ttlscfg.dll',
    'twinapi.dll',
    'twinui.dll',
    'uiautomationcoreres.dll',
    'uireng.dll',
    'umpoext.dll',
    'umpowmi.dll',
    'userinitext.dll',
    'userlanguageprofilecallback.dll',
    'userlanguagescpl.dll',
    'ustprov.dll',
    'vaultroaming.dll',
    'vmapplicationhealthmonitorproxy.dll',
    'vmrdvcore.dll',
    'vscmgrps.dll',
    'wcmapi.dll',
    'wcmcsp.dll',
    'wcmsvc.dll',
    'wdfres.dll',
    'webcamui.dll',
    'websocket.dll',
    'wfdprov.dll',
    'windows.applicationmodel.background.systemeventsbroker.dll',
    'windows.applicationmodel.background.timebroker.dll',
    'windows.applicationmodel.dll',
    'windows.applicationmodel.store.dll',
    'windows.applicationmodel.store.testingframework.dll',
    'windows.devices.enumeration.dll',
    'windows.devices.enumeration.ps.dll',
    'windows.devices.geolocation.dll',
    'windows.devices.portable.dll',
    'windows.devices.printers.extensions.dll',
    'windows.devices.sensors.dll',
    'windows.globalization.dll',
    'windows.globalization.fontgroups.dll',
    'windows.graphics.dll',
    'windows.graphics.printing.dll',
    'windows.help.runtime.dll',
    'windows.immersiveshell.serviceprovider.dll',
    'windows.media.devices.dll',
    'windows.media.dll',
    'windows.media.mediacontrol.dll',
    'windows.media.renewal.dll',
    'windows.media.streaming.dll',
    'windows.media.streaming.ps.dll',
    'windows.networking.backgroundtransfer.dll',
    'windows.networking.connectivity.dll',
    'windows.networking.dll',
    'windows.networking.networkoperators.hotspotauthentication.dll',
    'windows.networking.proximity.dll',
    'windows.networking.sockets.pushenabledapplication.dll',
    'windows.security.authentication.onlineid.dll',
    'windows.security.credentials.ui.credentialpicker.dll',
    'windows.storage.applicationdata.dll',
    'windows.storage.compression.dll',
    'windows.system.display.dll',
    'windows.system.profile.hardwareid.dll',
    'windows.system.remotedesktop.dll',
    'windows.ui.dll',
    'windows.ui.immersive.dll',
    'windows.ui.input.inking.dll',
    'windows.ui.xaml.dll',
    'windows.web.dll',
    'windowslivelogin.dll',
    'wininitext.dll',
    'winlangdb.dll',
    'winmde.dll',
    'winmmbase.dll',
    'winmsoirmprotector.dll',
    'winopcirmprotector.dll',
    'winsku.dll',
    'wintypes.dll',
    'wisp.dll',
    'witnesswmiv2provider.dll',
    'wkspbrokerax.dll',
    'wlanradiomanager.dll',
    'wlidcli.dll',
    'wlidcredprov.dll',
    'wlidfdp.dll',
    'wlidnsp.dll',
    'wlidprov.dll',
    'wlidres.dll',
    'wlidsvc.dll',
    'wlroamextension.dll',
    'wmidcom.dll',
    'wmitomi.dll',
    'workerdd.dll',
    'wpcwebsync.dll',
    'wpnapps.dll',
    'wpncore.dll',
    'wpninprc.dll',
    'wpnprv.dll',
    'wpnsruprov.dll',
    'wsclient.dll',
    'wsmagent.dll',
    'wsservice.dll',
    'wsshared.dll',
    'wssync.dll',
    'wuaext.dll',
    'wusettingsprovider.dll',
    'wushareduxresources.dll',
    'wwaapi.dll',
    'wwanradiomanager.dll',
    'xaudio2_8.dll',
    'xinput1_4.dll',
])
WIN10_DLLS = frozenset([
'aadauthhelper.dll',
'aadcloudap.dll',
'aadtb.dll',
'abovelockapphost.dll',
'accountaccessor.dll',
'accountscontrolinternal.dll',
'accountsrt.dll',
'acmigration.dll',
'acpbackgroundmanagerpolicy.dll',
'activationclient.dll',
'activationmanager.dll',
'activesynccsp.dll',
'activesyncprovider.dll',
'addressparser.dll',
'advapi32res.dll',
'aeproam.dll',
'ajrouter.dll',
'amsi.dll',
'amsiproxy.dll',
'aphostclient.dll',
'aphostres.dll',
'aphostservice.dll',
'api-ms-win-core-file-l2-1-0.dll',
'api-ms-win-core-file-l2-1-1.dll',
'api-ms-win-core-heap-obsolete-l1-1-0.dll',
'api-ms-win-core-kernel32-private-l1-1-0.dll',
'api-ms-win-core-kernel32-private-l1-1-1.dll',
'api-ms-win-core-localization-obsolete-l1-2-0.dll',
'api-ms-win-core-string-l2-1-0.dll',
'api-ms-win-core-string-obsolete-l1-1-0.dll',
'api-ms-win-core-stringloader-l1-1-1.dll',
'api-ms-win-core-xstate-l2-1-0.dll',
'api-ms-win-devices-config-l1-1-0.dll',
'api-ms-win-devices-config-l1-1-1.dll',
'api-ms-win-eventing-classicprovider-l1-1-0.dll',
'api-ms-win-eventing-controller-l1-1-0.dll',
'api-ms-win-eventing-legacy-l1-1-0.dll',
'api-ms-win-eventing-provider-l1-1-0.dll',
'api-ms-win-eventlog-legacy-l1-1-0.dll',
'api-ms-win-security-lsalookup-l2-1-0.dll',
'api-ms-win-security-lsalookup-l2-1-1.dll',
'api-ms-win-security-lsapolicy-l1-1-0.dll',
'api-ms-win-security-provider-l1-1-0.dll',
'api-ms-win-service-private-l1-1-1.dll',
'appcapture.dll',
'appcontracts.dll',
'applicationframe.dll',
'applockercsp.dll',
'appointmentactivation.dll',
'appointmentapis.dll',
'appraiser.dll',
'appreadiness.dll',
'appwiz.cpl',
'appxapplicabilityblob.dll',
'appxapplicabilityengine.dll',
'atlthunk.dll',
'authbrokerui.dll',
'autoworkplacen.dll',
'azuresettingsyncprovider.dll',
'backgroundmediapolicy.dll',
'bcastdvr.proxy.dll',
'bdesysprep.dll',
'bingasds.dll',
'bingmaps.dll',
'bingonlineservices.dll',
'bitsproxy.dll',
'brokerlib.dll',
'browserbroker.dll',
'browsersettingsync.dll',
'bthprops.cpl',
'bthtelemetry.dll',
'c_gsm7.dll',
'callhistoryclient.dll',
'cameracaptureui.dll',
'capauthz.dll',
'castlaunch.dll',
'cbtbackgroundmanagerpolicy.dll',
'cdp.dll',
'cdpsvc.dll',
'cellularapi.dll',
'cemapi.dll',
'cfgsppolicy.dll',
'chakra.dll',
'chakradiag.dll',
'chatapis.dll',
'clipboardserver.dll',
'clipc.dll',
'clipsvc.dll',
'cloudap.dll',
'clouddomainjoinaug.dll',
'clouddomainjoindatamodelserver.dll',
'cloudexperiencehost.dll',
'cloudexperiencehostbroker.dll',
'cloudexperiencehostcommon.dll',
'cloudexperiencehostuser.dll',
'cmgrcspps.dll',
'cmintegrator.dll',
'coml2.dll',
'commstypehelperutil_ca.dll',
'comppkgsup.dll',
'configmanager2.dll',
'configurationclient.dll',
'configureexpandedstorage.dll',
'conhostv1.dll',
'conhostv2.dll',
'consolelogon.dll',
'contactactivation.dll',
'contactapis.dll',
'contactharvesterds.dll',
'contentdeliverymanager.utilities.dll',
'coredpus.dll',
'coremessaging.dll',
'coremmres.dll',
'coreuicomponents.dll',
'cortana.persona.dll',
'cortanamapihelper.dll',
'cortanamapihelper.proxystub.dll',
'courtesyengine.dll',
'credentialmigrationhandler.dll',
'credprovdatamodel.dll',
'credprovhost.dll',
'credprovs.dll',
'crypttpmeksvc.dll',
'csystemeventsbrokerclient.dll',
'd3d12.dll',
'd3dcompiler_47.dll',
'dab.dll',
'dabapi.dll',
'dafcdp.dll',
'dafdnssd.dll',
'dafdockingprovider.dll',
'dafpos.dll',
'dafwiprov.dll',
'damediamanager.dll',
'dataexchange.dll',
'datasensehandlers.dll',
'davsyncprovider.dll',
'dbgcore.dll',
'dbgmodel.dll',
'dcpapi.dll',
'dcpsvc.dll',
'dcpurapi.dll',
'ddds.dll',
'defragres.dll',
'desk.cpl',
'desktopshellext.dll',
'developeroptionssettingshandlers.dll',
'deviceregistration.dll',
'devicesflowbroker.dll',
'devquerybroker.dll',
'diagnosticlogcsp.dll',
'diagtrack.dll',
'diagtrack_win.dll',
'diagtrack_wininternal.dll',
'dialclient.dll',
'dialserver.dll',
'dictationmanager.dll',
'directmanipulation.dll',
'displaymanager.dll',
'dmapisetextimpl.dll',
'dmappsres.dll',
'dmcfgutils.dll',
'dmcommandlineutils.dll',
'dmcsps.dll',
'dmenrollengine.dll',
'dmiso8601utils.dll',
'dmoleaututils.dll',
'dmprocessxmlfiltered.dll',
'dmpushproxy.dll',
'dmpushroutercore.dll',
'dmrserver.dll',
'dmwappushsvc.dll',
'dmwmicsp.dll',
'dmxmlhelputils.dll',
'dockinterface.proxystub.dll',
'dolbydecmft.dll',
'domgmt.dll',
'dosvc.dll',
'dot3conn.dll',
'dsccore.dll',
'dsclient.dll',
'dscproxy.dll',
'dsctimer.dll',
'dssvc.dll',
'dwmghost.dll',
'dwminit.dll',
'eamprogresshandler.dll',
'eapprovp.dll',
'easpoliciesbroker.dll',
'easpoliciesbrokerps.dll',
'edgehtml.dll',
'editbuffertesthook.dll',
'editionupgradehelper.dll',
'edpauditapi.dll',
'edputil.dll',
'eeprov.dll',
'eeutil.dll',
'efsext.dll',
'efswrt.dll',
'emailapis.dll',
'embeddedapplauncherconfig.dll',
'enrollmentapi.dll',
'enrolluxdll.dll',
'enterpriseappmgmtclient.dll',
'enterpriseappmgmtsvc.dll',
'enterprisecsps.dll',
'enterprisedesktopappmgmtcsp.dll',
'enterpriseetw.dll',
'enterprisemodernappmgmtcsp.dll',
'enterpriseresourcemanager.dll',
'errordetails.dll',
'esdsip.dll',
'esevss.dll',
'ethernetmediamanager.dll',
'etweseproviderresources.dll',
'eventaggregation.dll',
'execmodelclient.dll',
'execmodelproxy.dll',
'exsmime.dll',
'extrasxmlparser.dll',
'family.authentication.dll',
'family.cache.dll',
'family.client.dll',
'family.syncengine.core.dll',
'familysafetyext.dll',
'faxprinterinstaller.dll',
'fhsettingsprovider.dll',
'fingerprintcredential.dll',
'fingerprintenrollment.dll',
'firewall.cpl',
'flightsettings.dll',
'fontglyphanimator.dll',
'fontgroupsoverride.dll',
'fontprovider.dll',
'fwbase.dll',
'fwpolicyiomgr.dll',
'gamingtcui.dll',
'generaltel.dll',
'geocommon.dll',
'geolocation.dll',
'geolocatorhelper.dll',
'globcollationhost.dll',
'globinputhost.dll',
'gnssadapter.dll',
'halextpl080.dll',
'hascsp.dll',
'hdwwiz.cpl',
'hevcdecoder.dll',
'hmkd.dll',
'hrtfapo.dll',
'httpprxc.dll',
'httpsdatasource.dll',
'ieetwcollectorres.dll',
'ieetwproxystub.dll',
'ieproxy.dll',
'ihvrilproxy.dll',
'implatsetup.dll',
'inetcpl.cpl',
'inkanalysis.dll',
'inkobjcore.dll',
'inproclogger.dll',
'inputinjectionbroker.dll',
'inputlocalemanager.dll',
'inputservice.dll',
'internetmail.dll',
'internetmailcsp.dll',
'intl.cpl',
'invagent.dll',
'iotassignedaccesslockframework.dll',
'ipeloggingdictationhelper.dll',
'iri.dll',
'irprops.cpl',
'javascriptcollectionagent.dll',
'joinproviderol.dll',
'joinutil.dll',
'joy.cpl',
'jpmapcontrol.dll',
'jscript9diag.dll',
'kbdazst.dll',
'kbdbug.dll',
'kbddzo.dll',
'kbdfthrk.dll',
'kbdgn.dll',
'kbdgthc.dll',
'kbdjav.dll',
'kbdlvst.dll',
'kbdmonst.dll',
'kbdolch.dll',
'kbdoldit.dll',
'kbdosm.dll',
'kbdsora.dll',
'kbdtiprd.dll',
'kbdtt102.dll',
'kbdtzm.dll',
'kd_0c_8086.dll',
'kd_02_10df.dll',
'kd_02_19a2.dll',
'kd_02_1969.dll',
'kd_07_1415.dll',
'kdnet_uart16550.dll',
'kerbclientshared.dll',
'kernel.appcore.dll',
'keyworddetectormsftsidadapter.dll',
'knobscore.dll',
'knobscsp.dll',
'languagecomponentsinstaller.dll',
'legacynetux.dll',
'lfsvc.dll',
'licensemanager.dll',
'licensemanagerapi.dll',
'licensemanagersvc.dll',
'licensingcsp.dll',
'locationcelladapter.dll',
'locationcrowdsource.dll',
'locationframework.dll',
'locationframeworkinternalps.dll',
'locationframeworkps.dll',
'locationgeofences.dll',
'locationpecell.dll',
'locationpecomposite.dll',
'locationpegnss.dll',
'locationpeip.dll',
'locationpelegacywinlocation.dll',
'locationpermissions.dll',
'locationpewifi.dll',
'locationsystemintegration.dll',
'locationwebproxy.dll',
'locationwifiadapter.dll',
'locationwinpalmisc.dll',
'lockappbroker.dll',
'lockscreencontent.dll',
'lockscreencontenthost.dll',
'logoncontroller.dll',
'main.cpl',
'mapconfiguration.dll',
'mapcontrolcore.dll',
'mapcontrolstringsres.dll',
'mapsbtsvc.dll',
'mapsbtsvcproxy.dll',
'mapsstore.dll',
'mapstoasttask.dll',
'mapsupdatetask.dll',
'mbmediamanager.dll',
'mccsengineshared.dll',
'mccspal.dll',
'mcrecvsrc.dll',
'mdmmigrator.dll',
'mdmregistration.dll',
'messagingdatamodel2.dll',
'mfh263enc.dll',
'mfh265enc.dll',
'mfmkvsrcsnk.dll',
'mfperfhelper.dll',
'mibincodec.dll',
'microsoft-windows-appmodelexecevents.dll',
'microsoft-windows-mapcontrols.dll',
'microsoft-windows-moshost.dll',
'microsoft-windows-mostrace.dll',
'microsoft-windows-sleepstudy-events.dll',
'microsoft-windows-storage-tiering-events.dll',
'microsoft-windows-system-events.dll',
'microsoft.management.infrastructure.native.unmanaged.dll',
'microsoftaccountcloudap.dll',
'microsoftaccountextension.dll',
'microsoftaccounttokenprovider.dll',
'mimofcodec.dll',
'miracastinputmgr.dll',
'miracastreceiver.dll',
'mispace.dll',
'mmsys.cpl',
'modernexecserver.dll',
'mos.dll',
'moshost.dll',
'moshostclient.dll',
'moshostcore.dll',
'mpeval.dll',
'mpunits.dll',
'mrt100.dll',
'mrt_map.dll',
'ms3dthumbnailprovider.dll',
'msajapi.dll',
'msalacdecoder.dll',
'msalacencoder.dll',
'msamrnbdecoder.dll',
'msamrnbencoder.dll',
'msamrnbsink.dll',
'msamrnbsource.dll',
'msauserext.dll',
'msctfuimanager.dll',
'msflacdecoder.dll',
'msflacencoder.dll',
'mshtmldac.dll',
'msphotography.dll',
'msvcp120_clr0400.dll',
'mswb7.dll',
'mtf.dll',
'mtfserver.dll',
'musdialoghandlers.dll',
'musupdatehandlers.dll',
'nativemap.dll',
'navshutdown.dll',
'ncpa.cpl',
'ncuprov.dll',
'ndisimplatformnetcfg.dll',
'netprovfw.dll',
'netsetupapi.dll',
'netsetupengine.dll',
'netsetupshim.dll',
'netsetupsvc.dll',
'networkbindingenginemigplugin.dll',
'networkcollectionagent.dll',
'networkdesktopsettings.dll',
'networkhelper.dll',
'networkmobilesettings.dll',
'netwphelper.dll',
'nfcprovisioningplugin.dll',
'nfcradiomedia.dll',
'ngccredprov.dll',
'ngcctnr.dll',
'ngcctnrgidshandler.dll',
'ngcctnrsvc.dll',
'ngckeyenum.dll',
'ngcksp.dll',
'ngcpopkeysrv.dll',
'ngcprocsp.dll',
'ngcsvc.dll',
'ngctasks.dll',
'nmaa.dll',
'notificationcontroller.dll',
'notificationcontrollerps.dll',
'notificationobjfactory.dll',
'notificationplatformcomponent.dll',
'npsmdesktopprovider.dll',
'ntlmshared.dll',
'oemlicense.dll',
'offlinelsa.dll',
'offlinesam.dll',
'offreg.dll',
'omadmagent.dll',
'omadmapi.dll',
'ondemandbrokerclient.dll',
'ondemandconnroutehelper.dll',
'onebackuphandler.dll',
'onedrivesettingsyncprovider.dll',
'pcsvdevice.dll',
'peerdistcacheprovider.dll',
'personax.dll',
'phonecallhistoryapis.dll',
'phoneom.dll',
'phoneplatformabstraction.dll',
'phoneservice.dll',
'phoneserviceres.dll',
'phoneutil.dll',
'phoneutilres.dll',
'pimindexmaintenance.dll',
'pimindexmaintenanceclient.dll',
'pimstore.dll',
'pinenrollment.dll',
'playtodevice.dll',
'playtomenu.dll',
'playtoreceiver.dll',
'ploptin.dll',
'pnpclean.dll',
'policymanagerprecheck.dll',
'posyncservices.dll',
'powercfg.cpl',
'prauthproviders.dll',
'printdialogs3d.dll',
'printplatformconfig.dll',
'printwsdahost.dll',
'prm0009.dll',
'provdatastore.dll',
'provengine.dll',
'provhandlers.dll',
'provisioningcsp.dll',
'provisioninghandlers.dll',
'provops.dll',
'provpackageapidll.dll',
'provplatformdesktop.dll',
'provplugineng.dll',
'proximitycommonpal.dll',
'proximityrtapipal.dll',
'proximityservicepal.dll',
'psmserviceexthost.dll',
'quickactionsdatamodel.dll',
'radcui.dll',
'raschapext.dll',
'rascustom.dll',
'rasmediamanager.dll',
'rastlsext.dll',
'rdbui.dll',
'rdpsaps.dll',
'rdvidcrl.dll',
'rdxservice.dll',
'rdxtaskfactory.dll',
'readingviewresources.dll',
'reagenttask.dll',
'reinfo.dll',
'remoteaudioendpoint.dll',
'remotenaturallanguage.dll',
'remotewipecsp.dll',
'removablemediaprovisioningplugin.dll',
'reportingcsp.dll',
'rilproxy.dll',
'rmsroamingsecurity.dll',
'rtmediaframe.dll',
'rtworkq.dll',
'sbservicetrigger.dll',
'scapi.dll',
'scdeviceenum.dll',
'search.protocolhandler.mapi2.dll',
'sebbackgroundmanagerpolicy.dll',
'sensorcustomadbalgorithm.dll',
'sensorservice.dll',
'sensorsnativeapi.dll',
'sensorsnativeapi.v2.dll',
'sensorsutilsv2.dll',
'setproxycredential.dll',
'settingsextensibilityhandlers.dll',
'settingshandlers_closedcaptioning.dll',
'settingshandlers_flashlight.dll',
'settingshandlers_geolocation.dll',
'settingshandlers_maps.dll',
'settingshandlers_notifications.dll',
'settingshandlers_nt.dll',
'settingshandlers_onecore_batterysaver.dll',
'settingshandlers_privacy.dll',
'settingshandlers_signinoptions.dll',
'settingshandlers_siuf.dll',
'settingshandlers_storagesense.dll',
'settingshandlers_useraccount.dll',
'settingsynccore.dll',
'settingsyncpolicy.dll',
'sharedstartmodelshim.dll',
'sharehost.dll',
'shutdownux.dll',
'slpts.dll',
'smphost.dll',
'smsroutersvc.dll',
'speechpal.dll',
'srhinproc.dll',
'staterepository.core.dll',
'storageusage.dll',
'storeagent.dll',
'suplcsps.dll',
'surfacehubhandlers.dll',
'synccontroller.dll',
'syncmlhook.dll',
'syncproxy.dll',
'syncres.dll',
'syncsettings.dll',
'syncutil.dll',
'sysdm.cpl',
'systemsettings.datamodel.dll',
'systemsettings.deviceencryptionhandlers.dll',
'systemsettings.handlers.dll',
'systemsettings.useraccountshandlers.dll',
'systemsettingsthresholdadminflowui.dll',
'tabletpc.cpl',
'tbauth.dll',
'telephon.cpl',
'tetheringclient.dll',
'tetheringconfigsp.dll',
'tetheringieprovider.dll',
'tetheringmgr.dll',
'tetheringservice.dll',
'tetheringstation.dll',
'textinputframework.dll',
'tileobjserver.dll',
'timedate.cpl',
'tokenbinding.dll',
'tokenbroker.dll',
'tokenbrokerui.dll',
'tpmcertresources.dll',
'tpmcoreprovisioning.dll',
'tssessionux.dll',
'ttlsext.dll',
'twinui.appcore.dll',
'tzsyncres.dll',
'ucrtbase.dll',
'umpo-overrides-base.dll',
'umpo-overrides-xpc.dll',
'unenrollhook.dll',
'unistore.dll',
'updatecsp.dll',
'updatehandlers.dll',
'updatepolicy.dll',
'userdataaccessres.dll',
'userdataaccountapis.dll',
'userdatalanguageutil.dll',
'userdataplatformhelperutil.dll',
'userdataservice.dll',
'userdatatimeutil.dll',
'userdatatypehelperutil.dll',
'userdeviceregistration.dll',
'userdeviceregistration.ngc.dll',
'usermgr.dll',
'usermgrcli.dll',
'usermgrproxy.dll',
'usoapi.dll',
'usocore.dll',
'uvoipbackgroundmanagerpolicy.dll',
'vboxd3d9wddm.dll',
'vcardparser.dll',
'vedatalayerhelpers.dll',
'veeventdispatcher.dll',
'vestoreeventhandlers.dll',
'voiceactivationmanager.dll',
'vpnv2csp.dll',
'walletbackgroundserviceproxy.dll',
'walletproxy.dll',
'walletservice.dll',
'wephostsvc.dll',
'weretw.dll',
'wificonfigsp.dll',
'wificonnapi.dll',
'wifidisplay.dll',
'wifinetworkmanager.dll',
'wifiprofilessettinghandler.dll',
'winbici.dll',
'winbiodatamodel.dll',
'winbioext.dll',
'windows.accountscontrol.dll',
'windows.applicationmodel.core.dll',
'windows.applicationmodel.lockscreen.dll',
'windows.applicationmodel.wallet.dll',
'windows.clouddomainjoinaug.proxystub.dll',
'windows.cortana.dll',
'windows.cortana.pal.desktop.dll',
'windows.cortana.proxystub.dll',
'windows.data.pdf.dll',
'windows.devices.alljoyn.dll',
'windows.devices.background.dll',
'windows.devices.background.ps.dll',
'windows.devices.bluetooth.dll',
'windows.devices.custom.dll',
'windows.devices.custom.ps.dll',
'windows.devices.humaninterfacedevice.dll',
'windows.devices.lights.dll',
'windows.devices.lowlevel.dll',
'windows.devices.midi.dll',
'windows.devices.perception.dll',
'windows.devices.picker.dll',
'windows.devices.pointofservice.dll',
'windows.devices.printers.dll',
'windows.devices.radios.dll',
'windows.devices.scanners.dll',
'windows.devices.serialcommunication.dll',
'windows.devices.smartcards.dll',
'windows.devices.usb.dll',
'windows.devices.wifi.dll',
'windows.devices.wifidirect.dll',
'windows.energy.dll',
'windows.gaming.input.dll',
'windows.gaming.preview.dll',
'windows.gaming.xboxlive.storage.dll',
'windows.graphics.printing.3d.dll',
'windows.internal.bluetooth.dll',
'windows.internal.management.dll',
'windows.internal.shell.broker.dll',
'windows.internal.ui.bioenrollment.proxystub.dll',
'windows.internal.ui.logon.proxystub.dll',
'windows.management.lockdown.dll',
'windows.management.provisioning.proxystub.dll',
'windows.management.workplace.workplacesettings.dll',
'windows.media.audio.dll',
'windows.media.backgroundmediaplayback.dll',
'windows.media.editing.dll',
'windows.media.faceanalysis.dll',
'windows.media.ocr.dll',
'windows.media.photo.import.dll',
'windows.media.playback.backgroundmediaplayer.dll',
'windows.media.playback.mediaplayer.dll',
'windows.media.playback.proxystub.dll',
'windows.media.protection.playready.dll',
'windows.media.speech.dll',
'windows.media.speech.uxres.dll',
'windows.networking.backgroundtransfer.backgroundmanagerpolicy.dll',
'windows.networking.backgroundtransfer.contentprefetchtask.dll',
'windows.networking.hostname.dll',
'windows.networking.servicediscovery.dnssd.dll',
'windows.networking.ux.eaprequesthandler.dll',
'windows.networking.ux.proxystub.dll',
'windows.networking.vpn.dll',
'windows.security.authentication.web.core.dll',
'windows.security.credentials.ui.userconsentverifier.dll',
'windows.shell.search.urihandler.dll',
'windows.shell.servicehostbuilder.dll',
'windows.speech.pal.dll',
'windows.staterepository.dll',
'windows.storage.dll',
'windows.storage.search.dll',
'windows.system.diagnostics.dll',
'windows.system.launcher.dll',
'windows.system.profile.retailinfo.dll',
'windows.system.profile.systemmanufacturers.dll',
'windows.system.systemmanagement.dll',
'windows.ui.biofeedback.dll',
'windows.ui.blockedshutdown.dll',
'windows.ui.core.textinput.dll',
'windows.ui.cred.dll',
'windows.ui.logon.dll',
'windows.ui.picturepassword.dll',
'windows.ui.search.dll',
'windows.ui.shell.dll',
'windows.ui.xaml.maps.dll',
'windows.ui.xaml.phone.dll',
'windows.ui.xaml.resources.dll',
'windows.web.diagnostics.dll',
'windows.web.http.dll',
'windows.xbox.networking.proxystub.dll',
'windowscodecsraw.dll',
'windowsperformancerecordercontrol.dll',
'wined3dwddm.dll',
'winipcfile.dll',
'winipcsecproc.dll',
'winipcsecproc_ssp.dll',
'winjson.dll',
'winlogonext.dll',
'winmsipc.dll',
'winnlsres.dll',
'winrttracing.dll',
'winsetupui.dll',
'wlanmediamanager.dll',
'wlansvcpal.dll',
'woftasks.dll',
'wofutil.dll',
'wordbreakers.dll',
'workfolderscontrol.dll',
'workfoldersgpext.dll',
'workfoldersres.dll',
'workfoldersshell.dll',
'workfolderssvc.dll',
'wpbcreds.dll',
'wpkbdlayout.dll',
'wpnservice.dll',
'wpportinglibrary.dll',
'wpprecorderum.dll',
'wptaskscheduler.dll',
'wpx.dll',
'wscui.cpl',
'wshhyperv.dll',
'wsp_fs.dll',
'wsp_health.dll',
'wsp_sr.dll',
'wsplib.dll',
'wuau.dll',
'wuautoappupdate.dll',
'wudfsmcclassext.dll',
'wudfx02000.dll',
'wuuhext.dll',
'wwaext.dll',
'xamldiagnostics.dll',
'xaudio2_9.dll',
'xblauthmanager.dll',
'xblauthmanagerproxy.dll',
'xblauthtokenbrokerext.dll',
'xblgamesave.dll',
'xblgamesaveproxy.dll',
'xboxnetapisvc.dll',
'xinputuap.dll',
'xpsdocumenttargetprint.dll',
'ztrace_ca.dll',
'ztrace_maps.dll',
])
# Fold the per-OS system DLL allow-lists into the master set. WIN10_DLLS is
# defined immediately above and was clearly meant to be part of this union
# (the original continuation line began with a stray '|' followed by corrupted
# text); without it the Windows-10-era DLL names would never be filtered.
COMMON_IMPORT_LIBS = COMMON_IMPORT_LIBS | WIN7_DLLS | WIN8_DLLS | WIN10_DLLS
COMMON_IMPORT_LIBS = frozenset([
'a4wapi.dll',
'a4wrv.dll',
'aavm4h.dll',
'ac_as.dll',
'ac_c.dll',
'accesor.dll',
'accom.dll',
'accommonclass.dll',
'accommondialog.dll',
'accore.dll',
'acctrl.dll',
'acdb18.dll',
'acdllenv.dll',
'ace-w64r-20-1.dll',
'ace.dll',
'ace32.dll',
'acecore.dll',
'acewstr.dll',
'acobject.dll',
'acpal.dll',
'acs_util.dll',
'activeds.dll',
'adac20b.dll',
'adapt_for_imports.dll',
'adm_core6.dll',
'adm_coreutils6.dll',
'adobexmp.dll',
'ael73.dll',
'ael73d.dll',
'aepic.dll',
'aevlsub.dll',
'afbase.dll',
'afc31.dll',
'afcore.dll',
'aftutils.dll',
'afutil.dll',
'agm.dll',
'algorithms_base_release_x64.dll',
'amqxcs2.dll',
'apexframework_x64.dll',
'api-device-config.dll',
'api-ms-win-appmodel-identity-l1-2-0.dll',
'api-ms-win-appmodel-runtime-internal-l1-1-1.dll',
'api-ms-win-appmodel-runtime-internal-l1-1-2.dll',
'api-ms-win-appmodel-runtime-l1-1-0.dll',
'api-ms-win-appmodel-runtime-l1-1-1.dll',
'api-ms-win-appmodel-state-l1-2-0.dll',
'api-ms-win-appmodel-unlock-l1-1-0.dll',
'api-ms-win-core-apiquery-l1-1-0.dll',
'api-ms-win-core-atoms-l1-1-0.dll',
'api-ms-win-core-com-l1-1-0.dll',
'api-ms-win-core-com-l1-1-1.dll',
'api-ms-win-core-com-l2-1-1.dll',
'api-ms-win-core-com-midlproxystub-l1-1-0.dll',
'api-ms-win-core-com-private-l1-1-0.dll',
'api-ms-win-core-console-l1-1-0.dll',
'api-ms-win-core-console-l2-1-0.dll',
'api-ms-win-core-crt-l1-1-0.dll',
'api-ms-win-core-crt-l2-1-0.dll',
'api-ms-win-core-datetime-l1-1-0.dll',
'api-ms-win-core-datetime-l1-1-1.dll',
'api-ms-win-core-debug-l1-1-0.dll',
'api-ms-win-core-debug-l1-1-1.dll',
'api-ms-win-core-delayload-l1-1-0.dll',
'api-ms-win-core-delayload-l1-1-1.dll',
'api-ms-win-core-errorhandling-l1-1-0.dll',
'api-ms-win-core-errorhandling-l1-1-1.dll',
'api-ms-win-core-errorhandling-l1-1-2.dll',
'api-ms-win-core-errorhandling-l1-1-3.dll',
'api-ms-win-core-featurestaging-l1-1-0.dll',
'api-ms-win-core-fibers-l1-1-0.dll',
'api-ms-win-core-fibers-l1-1-1.dll',
'api-ms-win-core-file-l1-1-0.dll',
'api-ms-win-core-file-l1-2-0.dll',
'api-ms-win-core-file-l1-2-1.dll',
'api-ms-win-core-file-l1-2-2.dll',
'api-ms-win-core-file-l2-1-0.dll',
'api-ms-win-core-file-l2-1-1.dll',
'api-ms-win-core-file-l2-1-2.dll',
'api-ms-win-core-handle-l1-1-0.dll',
'api-ms-win-core-heap-l1-1-0.dll',
'api-ms-win-core-heap-l1-2-0.dll',
'api-ms-win-core-heap-l2-1-0.dll',
'api-ms-win-core-heap-obsolete-l1-1-0.dll',
'api-ms-win-core-interlocked-l1-1-0.dll',
'api-ms-win-core-interlocked-l1-2-0.dll',
'api-ms-win-core-io-l1-1-0.dll',
'api-ms-win-core-io-l1-1-1.dll',
'api-ms-win-core-job-l1-1-0.dll',
'api-ms-win-core-job-l2-1-0.dll',
'api-ms-win-core-kernel32-legacy-l1-1-0.dll',
'api-ms-win-core-kernel32-legacy-l1-1-1.dll',
'api-ms-win-core-kernel32-private-l1-1-0.dll',
'api-ms-win-core-kernel32-private-l1-1-1.dll',
'api-ms-win-core-largeinteger-l1-1-0.dll',
'api-ms-win-core-libraryloader-l1-1-0.dll',
'api-ms-win-core-libraryloader-l1-1-1.dll',
'api-ms-win-core-libraryloader-l1-2-0.dll',
'api-ms-win-core-libraryloader-l1-2-1.dll',
'api-ms-win-core-libraryloader-l1-2-2.dll',
'api-ms-win-core-libraryloader-l2-1-0.dll',
'api-ms-win-core-localization-l1-1-0.dll',
'api-ms-win-core-localization-l1-2-0.dll',
'api-ms-win-core-localization-l1-2-1.dll',
'api-ms-win-core-localization-l1-2-2.dll',
'api-ms-win-core-localization-l2-1-0.dll',
'api-ms-win-core-localization-obsolete-l1-2-0.dll',
'api-ms-win-core-localization-private-l1-1-0.dll',
'api-ms-win-core-localregistry-l1-1-0.dll',
'api-ms-win-core-marshal-l1-1-0.dll',
'api-ms-win-core-memory-l1-1-0.dll',
'api-ms-win-core-memory-l1-1-1.dll',
'api-ms-win-core-memory-l1-1-2.dll',
'api-ms-win-core-memory-l1-1-3.dll',
'api-ms-win-core-misc-l1-1-0.dll',
'api-ms-win-core-namedpipe-l1-1-0.dll',
'api-ms-win-core-namedpipe-l1-2-0.dll',
'api-ms-win-core-namespace-l1-1-0.dll',
'api-ms-win-core-normalization-l1-1-0.dll',
'api-ms-win-core-path-l1-1-0.dll',
'api-ms-win-core-perfcounters-l1-1-0.dll',
'api-ms-win-core-privateprofile-l1-1-0.dll',
'api-ms-win-core-privateprofile-l1-1-1.dll',
'api-ms-win-core-processenvironment-l1-1-0.dll',
'api-ms-win-core-processenvironment-l1-2-0.dll',
'api-ms-win-core-processthreads-l1-1-0.dll',
'api-ms-win-core-processthreads-l1-1-1.dll',
'api-ms-win-core-processthreads-l1-1-2.dll',
'api-ms-win-core-processthreads-l1-1-3.dll',
'api-ms-win-core-processtopology-obsolete-l1-1-0.dll',
'api-ms-win-core-profile-l1-1-0.dll',
'api-ms-win-core-psapi-ansi-l1-1-0.dll',
'api-ms-win-core-psapi-l1-1-0.dll',
'api-ms-win-core-psm-app-l1-1-0.dll',
'api-ms-win-core-psm-key-l1-1-0.dll',
'api-ms-win-core-quirks-l1-1-0.dll',
'api-ms-win-core-realtime-l1-1-0.dll',
'api-ms-win-core-registry-l1-1-0.dll',
'api-ms-win-core-registry-l1-1-1.dll',
'api-ms-win-core-registry-l2-1-0.dll',
'api-ms-win-core-registryuserspecific-l1-1-0.dll',
'api-ms-win-core-rtlsupport-l1-1-0.dll',
'api-ms-win-core-rtlsupport-l1-2-0.dll',
'api-ms-win-core-shlwapi-legacy-l1-1-0.dll',
'api-ms-win-core-shlwapi-obsolete-l1-1-0.dll',
'api-ms-win-core-shutdown-l1-1-0.dll',
'api-ms-win-core-sidebyside-l1-1-0.dll',
'api-ms-win-core-string-l1-1-0.dll',
'api-ms-win-core-string-l2-1-0.dll',
'api-ms-win-core-string-l2-1-1.dll',
'api-ms-win-core-string-obsolete-l1-1-0.dll',
'api-ms-win-core-stringansi-l1-1-0.dll',
'api-ms-win-core-synch-l1-1-0.dll',
'api-ms-win-core-synch-l1-2-0.dll',
'api-ms-win-core-synch-l1-2-1.dll',
'api-ms-win-core-sysinfo-l1-1-0.dll',
'api-ms-win-core-sysinfo-l1-2-0.dll',
'api-ms-win-core-sysinfo-l1-2-1.dll',
'api-ms-win-core-sysinfo-l1-2-3.dll',
'api-ms-win-core-systemtopology-l1-1-0.dll',
'api-ms-win-core-threadpool-l1-1-0.dll',
'api-ms-win-core-threadpool-l1-2-0.dll',
'api-ms-win-core-threadpool-legacy-l1-1-0.dll',
'api-ms-win-core-threadpool-private-l1-1-0.dll',
'api-ms-win-core-timezone-l1-1-0.dll',
'api-ms-win-core-toolhelp-l1-1-0.dll',
'api-ms-win-core-url-l1-1-0.dll',
'api-ms-win-core-util-l1-1-0.dll',
'api-ms-win-core-version-l1-1-0.dll',
'api-ms-win-core-version-l1-1-1.dll',
'api-ms-win-core-windowserrorreporting-l1-1-0.dll',
'api-ms-win-core-windowserrorreporting-l1-1-1.dll',
'api-ms-win-core-winrt-error-l1-1-0.dll',
'api-ms-win-core-winrt-error-l1-1-1.dll',
'api-ms-win-core-winrt-l1-1-0.dll',
'api-ms-win-core-winrt-propertysetprivate-l1-1-1.dll',
'api-ms-win-core-winrt-robuffer-l1-1-0.dll',
'api-ms-win-core-winrt-string-l1-1-0.dll',
'api-ms-win-core-wow64-l1-1-0.dll',
'api-ms-win-core-wow64-l1-1-1.dll',
'api-ms-win-crt-conio-l1-1-0.dll',
'api-ms-win-crt-convert-l1-1-0.dll',
'api-ms-win-crt-environment-l1-1-0.dll',
'api-ms-win-crt-filesystem-l1-1-0.dll',
'api-ms-win-crt-heap-l1-1-0.dll',
'api-ms-win-crt-locale-l1-1-0.dll',
'api-ms-win-crt-math-l1-1-0.dll',
'api-ms-win-crt-multibyte-l1-1-0.dll',
'api-ms-win-crt-private-l1-1-0.dll',
'api-ms-win-crt-process-l1-1-0.dll',
'api-ms-win-crt-runtime-l1-1-0.dll',
'api-ms-win-crt-stdio-l1-1-0.dll',
'api-ms-win-crt-string-l1-1-0.dll',
'api-ms-win-crt-time-l1-1-0.dll',
'api-ms-win-crt-utility-l1-1-0.dll',
'api-ms-win-devices-config-l1-1-1.dll',
'api-ms-win-devices-query-l1-1-0.dll',
'api-ms-win-devices-query-l1-1-1.dll',
'api-ms-win-downlevel-advapi32-l1-1-0.dll',
'api-ms-win-downlevel-advapi32-l2-1-0.dll',
'api-ms-win-downlevel-kernel32-l1-1-0.dll',
'api-ms-win-downlevel-normaliz-l1-1-0.dll',
'api-ms-win-downlevel-ole32-l1-1-0.dll',
'api-ms-win-downlevel-shell32-l1-1-0.dll',
'api-ms-win-downlevel-shlwapi-l1-1-0.dll',
'api-ms-win-downlevel-shlwapi-l2-1-0.dll',
'api-ms-win-downlevel-user32-l1-1-0.dll',
'api-ms-win-downlevel-version-l1-1-0.dll',
'api-ms-win-dx-d3dkmt-l1-1-0.dll',
'api-ms-win-eventing-classicprovider-l1-1-0.dll',
'api-ms-win-eventing-consumer-l1-1-0.dll',
'api-ms-win-eventing-controller-l1-1-0.dll',
'api-ms-win-eventing-legacy-l1-1-0.dll',
'api-ms-win-eventing-provider-l1-1-0.dll',
'api-ms-win-eventing-tdh-l1-1-0.dll',
'api-ms-win-eventlog-legacy-l1-1-0.dll',
'api-ms-win-mm-time-l1-1-0.dll',
'api-ms-win-ntuser-rectangle-l1-1-0.dll',
'api-ms-win-ntuser-sysparams-l1-1-0.dll',
'api-ms-win-ole32-ie-l1-1-0.dll',
'api-ms-win-oobe-notification-l1-1-0.dll',
'api-ms-win-power-base-l1-1-0.dll',
'api-ms-win-power-setting-l1-1-0.dll',
'api-ms-win-rtcore-ntuser-clipboard-l1-1-0.dll',
'api-ms-win-rtcore-ntuser-private-l1-1-0.dll',
'api-ms-win-rtcore-ntuser-synch-l1-1-0.dll',
'api-ms-win-rtcore-ntuser-window-l1-1-0.dll',
'api-ms-win-security-accesshlpr-l1-1-0.dll',
'api-ms-win-security-activedirectoryclient-l1-1-0.dll',
'api-ms-win-security-base-l1-1-0.dll',
'api-ms-win-security-base-l1-2-0.dll',
'api-ms-win-security-capability-l1-1-0.dll',
'api-ms-win-security-credentials-l1-1-0.dll',
'api-ms-win-security-credentials-l2-1-0.dll',
'api-ms-win-security-cryptoapi-l1-1-0.dll',
'api-ms-win-security-grouppolicy-l1-1-0.dll',
'api-ms-win-security-isolatedcontainer-l1-1-0.dll',
'api-ms-win-security-lsalookup-l1-1-0.dll',
'api-ms-win-security-lsalookup-l1-1-1.dll',
'api-ms-win-security-lsalookup-l1-1-2.dll',
'api-ms-win-security-lsalookup-l2-1-0.dll',
'api-ms-win-security-lsalookup-l2-1-1.dll',
'api-ms-win-security-lsapolicy-l1-1-0.dll',
'api-ms-win-security-provider-l1-1-0.dll',
'api-ms-win-security-sddl-l1-1-0.dll',
'api-ms-win-security-systemfunctions-l1-1-0.dll',
'api-ms-win-security-trustee-l1-1-0.dll',
'api-ms-win-service-core-l1-1-0.dll',
'api-ms-win-service-core-l1-1-1.dll',
'api-ms-win-service-management-l1-1-0.dll',
'api-ms-win-service-management-l2-1-0.dll',
'api-ms-win-service-private-l1-1-0.dll',
'api-ms-win-service-winsvc-l1-1-0.dll',
'api-ms-win-service-winsvc-l1-2-0.dll',
'api-ms-win-shcore-comhelpers-l1-1-0.dll',
'api-ms-win-shcore-obsolete-l1-1-0.dll',
'api-ms-win-shcore-path-l1-1-0.dll',
'api-ms-win-shcore-registry-l1-1-0.dll',
'api-ms-win-shcore-registry-l1-1-1.dll',
'api-ms-win-shcore-scaling-l1-1-0.dll',
'api-ms-win-shcore-scaling-l1-1-1.dll',
'api-ms-win-shcore-stream-l1-1-0.dll',
'api-ms-win-shcore-stream-winrt-l1-1-0.dll',
'api-ms-win-shcore-sysinfo-l1-1-0.dll',
'api-ms-win-shcore-taskpool-l1-1-0.dll',
'api-ms-win-shcore-thread-l1-1-0.dll',
'api-ms-win-shcore-unicodeansi-l1-1-0.dll',
'api-ms-win-shell-namespace-l1-1-0.dll',
'api-ms-win-shell-shdirectory-l1-1-0.dll',
'api-ms-win-shell-shellcom-l1-1-0.dll',
'api-ms-win-shell-shellfolders-l1-1-0.dll',
'api-ms-win-shlwapi-winrt-storage-l1-1-1.dll',
'api-ms-win-stateseparation-helpers-l1-1-0.dll',
'apibasegm.dll',
'apiclient.dll',
'app.dll',
'apphelp.dll',
'application.dll',
'apputil.dll',
'appvisvsubsystems32.dll',
'appvisvsubsystems64.dll',
'appxalluserstore.dll',
'apr-iconv.dll',
'archive.dll',
'arkimage.dll',
'arkiostub.dll',
'armgrclientapi.dll',
'arrays.dll',
'arrorden.dll',
'arsp.dll',
'arst.dll',
'asciilib.dll',
'ascom10.dll',
'ashbase.dll',
'ashtask.dll',
'asio.dll',
'asl.dll',
'aslfoundation.dll',
'aswcmnbs.dll',
'aswcmnis.dll',
'aswcmnos.dll',
'aswengin.dll',
'aswengldr.dll',
'aswip.dll',
'aswlog.dll',
'aswproperty.dll',
'asynctask.dll',
'ataxsub.dll',
'atl71.dll',
'atl80.dll',
'atl90.dll',
'atl100.dll',
'atl110.dll',
'atom.dll',
'atwbxui15.dll',
'audioeng.dll',
'authz.dll',
'avcodec-52.dll',
'avcodec-55.dll',
'avcodec-56.dll',
'avcodec-57.dll',
'avcodec-58.dll',
'avcodec.dll',
'avdevice-52.dll',
'avdevice-58.dll',
'avfilter-6.dll',
'avfilter-7.dll',
'avformat-52.dll',
'avformat-55.dll',
'avformat-56.dll',
'avformat-57.dll',
'avformat-58.dll',
'avformat.dll',
'avkys.dll',
'avrt.dll',
'avtdatabase_ser.dll',
'avtdbatts.dll',
'avutil-50.dll',
'avutil-51.dll',
'avutil-52.dll',
'avutil-54.dll',
'avutil-55.dll',
'avutil-56.dll',
'avutil.dll',
'awt.dll',
'ax5api.dll',
'axe8sharedexpat.dll',
'aygshell.dll',
'backend.dll',
'bafl.dll',
'base.dll',
'basebroker2.dll',
'basez.dll',
'basic.dll',
'basicos.dll',
'basicos2.dll',
'bass.dll',
'bass_fx.dll',
'bc32fn.dll',
'bc32ui.dll',
'bc_api.dll',
'bcd.dll',
'bcgcbpro300.dll',
'bcgcbpro2430u120.dll',
'bcgcbpro2730ud141.dll',
'bcgcbpro3020.dll',
'bcp47langs.dll',
'bcrypt.dll',
'bcryptprimitives.dll',
'bds52f.dll',
'bib.dll',
'binkw32.dll',
'biosdk.dll',
'bluetoothapis.dll',
'bmm.dll',
'boom16_mtc.dll',
'boost_chrono-vc140-mt-1_60.dll',
'boost_date_time-vc140-mt-1_60.dll',
'boost_date_time.dll',
'boost_filesystem-mt.dll',
'boost_filesystem-vc140-mt-1_60.dll',
'boost_filesystem.dll',
'boost_program_options-w64r-20-1.dll',
'boost_python-vc100-mt-1_56.dll',
'boost_system-mt.dll',
'boost_system-vc120-mt-1_55.dll',
'boost_system-vc140-mt-1_60.dll',
'boost_system.dll',
'boost_thread-vc140-mt-1_60.dll',
'boost_threads.dll',
'borlndmm.dll',
'bugsplat.dll',
'bugtrap.dll',
'c0cdll1.dll',
'c0otb.dll',
'c2rui.dll',
'c4dll.dll',
'c5runx.dll',
'c55runx.dll',
'c60ascx.dll',
'c60dosx.dll',
'c60olex.dll',
'c60runx.dll',
'c60tpsx.dll',
'c_acs001.dll',
'c_atx001.dll',
'c_thgfi.dll',
'ca210_comm.dll',
'cabinet.dll',
'catafrfoundation.dll',
'catapplicationframe.dll',
'catdialogengine.dll',
'catgeometricobjects.dll',
'catgitinterfaces.dll',
'catgmgeometricinterfaces.dll',
'catgmmodelinterfaces.dll',
'catiaapplicationframe.dll',
'catinteractiveinterfaces.dll',
'catliteralfeatures.dll',
'catmathematics.dll',
'catmathstream.dll',
'catmechanicalmodeler.dll',
'catmecmodinterfaces.dll',
'catobjectmodelerbase.dll',
'catobjectmodelernavigator.dll',
'catobjectspecsmodeler.dll',
'catomx.dll',
'catplmidentificationaccess.dll',
'catproductstructure1.dll',
'catproductstructureinterfaces.dll',
'catsketcherinterfaces.dll',
'catsysts.dll',
'cattopologicalobjects.dll',
'catvisitf.dll',
'catvisualization.dll',
'catviz.dll',
'cblrtsm.dll',
'cblrtss.dll',
'cc3250mt.dll',
'cc3260mt.dll',
'cc3270mt.dll',
'cc3280mt.dll',
'cc32250mt.dll',
'cclib.dll',
'ccmcore.dll',
'cellcore.dll',
'cerlapp0471.dll',
'cerlsql0471.dll',
'cfgmgr32.dll',
'cg.dll',
'cggl.dll',
'chakracore.dll',
'chapp.dll',
'chart.dll',
'chrome_elf.dll',
'cktbl32.dll',
'claasc.dll',
'clabas.dll',
'clados.dll',
'clafm3.dll',
'clamss.dll',
'clanet.dll',
'claole.dll',
'clarun.dll',
'clatps.dll',
'clawe.dll',
'clblas.dll',
'clblast.dll',
'client_monitor.dll',
'clientapi.dll',
'clutilclasses.dll',
'cmmlib.dll',
'cmnapp.dll',
'cmnbind.dll',
'cmngen.dll',
'cmngui.dll',
'combase.dll',
'commandmanager.dll',
'commctrl.dll',
'common.dll',
'commondata.dll',
'commonlib.dll',
'commonui.dll',
'comphelp4msc.dll',
'comphelper.dll',
'comphelpmsc.dll',
'componen.dll',
'componentslib.dll',
'comppkgsup.dll',
'concrt140.dll',
'concrt140_app.dll',
'config.dll',
'confint.dll',
'conpastilla.dll',
'contcype.dll',
'controls.dll',
'cooltype.dll',
'coom14_mtc.dll',
'core.dll',
'core4.dll',
'core83.dll',
'core_rl_magick_.dll',
'core_rl_magickcore_.dll',
'coreapp.dll',
'coredll.dll',
'corefoundation.dll',
'coreint.dll',
'corelocalization.dll',
'coremanager.dll',
'coremessaging.dll',
'coretime.dll',
'coreuicomponents.dll',
'cp3245mt.dll',
'cpd3core.dll',
'cpd3datacore.dll',
'cpptools4.dll',
'cppu3.dll',
'cppuhelper3msc.dll',
'crashhandler.dll',
'crashreport.dll',
'crashrpt.dll',
'crashrpt1402.dll',
'crashrpt1403.dll',
'credui.dll',
'crlcomponent.dll',
'crlfrmwk.dll',
'crlmath.dll',
'crlresources.dll',
'crlutils.dll',
'crlutl.dll',
'crpe32.dll',
'crypt32.dll',
'cryptbase.dll',
'cryptngc.dll',
'cryptsp.dll',
'cryptui.dll',
'cryptxml.dll',
'cs200_usbcomm.dll',
'cscapi.dll',
'csi.dll',
'ctlapi.dll',
'ctreestd.dll',
'cvappmgr.dll',
'cvarchive.dll',
'cvbasiclib.dll',
'cvdatapipe.dll',
'cvfocus.dll',
'cvirte.dll',
'cvjobclient.dll',
'cvjobquery.dll',
'cvlib.dll',
'cvmmclientapi.dll',
'cvperformancemonitorlib.dll',
'cvsession.dll',
'cvxmlmsgsbase.dll',
'cw32core.dll',
'cw3230.dll',
'cw_main.dll',
'cwcommon.dll',
'cwdatabase.dll',
'cwhhla.dll',
'cwqtlib.dll',
'cximage.dll',
'cximagecrtu.dll',
'cxlibw-5-0.dll',
'cxxwrap_julia.dll',
'cyassert.dll',
'cygbabl-0.1-0.dll',
'cyggcc_s-1.dll',
'cyggcc_s-seh-1.dll',
'cyggegl-0.2-0.dll',
'cyggegl-0.4-0.dll',
'cygglib-2.0-0.dll',
'cyggobject-2.0-0.dll',
'cygiconv-2.dll',
'cygintl-8.dll',
'cygkritaglobal-16.dll',
'cygkritaimage-16.dll',
'cygkritaui-16.dll',
'cygqtcore-4.dll',
'cygruby200.dll',
'cygstdc++-6.dll',
'cygwin1.dll',
'cygwin10.dll',
'cygx11-6.dll',
'cygxi-6.dll',
'cygz.dll',
'cygznc-1.7.5.dll',
'cypedir.dll',
'cypeio.dll',
'cypemath.dll',
'cypemem.dll',
'cypemsgs.dll',
'cypestr.dll',
'd2d1.dll',
'd3d8.dll',
'd3d9.dll',
'd3d10.dll',
'd3d10_1.dll',
'd3d11.dll',
'd3d12.dll',
'd3dcompiler_43.dll',
'd3dcompiler_47.dll',
'd3dx9_26.dll',
'd3dx9_30.dll',
'd3dx9_40.dll',
'd3dx9_42.dll',
'd3dx9_43.dll',
'd3dx10_43.dll',
'd3dx11_43.dll',
'd3dxof.dll',
'dacommon.dll',
'dalog.dll',
'dastock.dll',
'data.dll',
'database.dll',
'databasemanager.dll',
'datacenter.dll',
'datahelpers.dll',
'datalayer.dll',
'daui.dll',
'dautil.dll',
'dbgeng.dll',
'dbghelp.dll',
'dclibxml2.dll',
'dclipx.dll',
'dcomp.dll',
'dcp-1.0.dll',
'dcpomatic2.dll',
'ddimage4.0.dll',
'ddraw.dll',
'deng_core.dll',
'deploy.dll',
'detoured.dll',
'deviceassociation.dll',
'devil.dll',
'devmgr.dll',
'devobj.dll',
'devsdk_base_release_x64.dll',
'devsdk_data_avol_release_x64.dll',
'devsdk_data_release_x64.dll',
'dhcpcsvc.dll',
'dhcpcsvc6.dll',
'di0panv2.dll',
'dialogcommon.dll',
'difxapi.dll',
'dinput.dll',
'dinput8.dll',
'dll_loader.dll',
'dmcmnutils.dll',
'doc.dll',
'dplayx.dll',
'drs32.dll',
'drvstore.dll',
'dsbacr32.dll',
'dsbaf32.dll',
'dsbas32.dll',
'dsdacl32.dll',
'dsetup.dll',
'dsintr32.dll',
'dsparse.dll',
'dsreg.dll',
'dsrole.dll',
'dssabc32.dll',
'dssys32.dll',
'dssysu32.dll',
'dui70.dll',
'duifw.dll',
'duilib.dll',
'duser.dll',
'dvacore.dll',
'dvamediatypes.dll',
'dvaui.dll',
'dvaunittesting.dll',
'dvaworkspace.dll',
'dwbase.dll',
'dwrite.dll',
'dwutility.dll',
'dxgi.dll',
'dxva2.dll',
'dynamiclink.dll',
'dynamorio.dll',
'eappcfg.dll',
'eax.dll',
'ecommon.dll',
'edgeiso.dll',
'editormodel.dll',
'editorserialization.dll',
'eesofcore.dll',
'efsadu.dll',
'efsrv.dll',
'efsutil.dll',
'elementcommon.dll',
'elscore.dll',
'emmisc.dll',
'encoding-conversion.dll',
'engine.dll',
'entograf.dll',
'env100.dll',
'env200.dll',
'ercoreclbbase471.dll',
'ercoreclbmemory471.dll',
'err_base.dll',
'errorlog.dll',
'errorreport.dll',
'errortrace.dll',
'ersyscclbsystem471.dll',
'ersysdclbrecsystem471.dll',
'ersystclbcomutil471.dll',
'ersystclbdataaccessmgr471.dll',
'euser.dll',
'event_manager.dll',
'event_routing.dll',
'evr.dll',
'evtaskapi.dll',
'exsec32.dll',
'ext-ms-onecore-appmodel-staterepository-cache-l1-1-0.dll',
'ext-ms-win-com-ole32-l1-1-0.dll',
'ext-ms-win-com-ole32-l1-1-1.dll',
'ext-ms-win-core-iuri-l1-1-0.dll',
'ext-ms-win-devmgmt-policy-l1-1-0.dll',
'ext-ms-win-devmgmt-policy-l1-1-1.dll',
'ext-ms-win-edputil-policy-l1-1-0.dll',
'ext-ms-win-els-elscore-l1-1-0.dll',
'ext-ms-win-feclient-encryptedfile-l1-1-0.dll',
'ext-ms-win-gdi-dc-create-l1-1-0.dll',
'ext-ms-win-gdi-dc-l1-2-0.dll',
'ext-ms-win-gdi-draw-l1-1-0.dll',
'ext-ms-win-gdi-draw-l1-1-1.dll',
'ext-ms-win-gdi-font-l1-1-0.dll',
'ext-ms-win-kernel32-package-l1-1-0.dll',
'ext-ms-win-mrmcorer-resmanager-l1-1-0.dll',
'ext-ms-win-networking-wlanapi-l1-1-0.dll',
'ext-ms-win-ntuser-dialogbox-l1-1-0.dll',
'ext-ms-win-ntuser-draw-l1-1-0.dll',
'ext-ms-win-ntuser-gui-l1-1-0.dll',
'ext-ms-win-ntuser-keyboard-l1-1-0.dll',
'ext-ms-win-ntuser-message-l1-1-0.dll',
'ext-ms-win-ntuser-message-l1-1-1.dll',
'ext-ms-win-ntuser-misc-l1-1-0.dll',
'ext-ms-win-ntuser-private-l1-1-1.dll',
'ext-ms-win-ntuser-rectangle-ext-l1-1-0.dll',
'ext-ms-win-ntuser-synch-l1-1-0.dll',
'ext-ms-win-ntuser-uicontext-ext-l1-1-0.dll',
'ext-ms-win-ntuser-window-l1-1-0.dll',
'ext-ms-win-ntuser-window-l1-1-1.dll',
'ext-ms-win-ntuser-window-l1-1-2.dll',
'ext-ms-win-ntuser-window-l1-1-3.dll',
'ext-ms-win-ntuser-window-l1-1-4.dll',
'ext-ms-win-ntuser-windowclass-l1-1-0.dll',
'ext-ms-win-ntuser-windowstation-l1-1-0.dll',
'ext-ms-win-ole32-bindctx-l1-1-0.dll',
'ext-ms-win-rtcore-gdi-devcaps-l1-1-0.dll',
'ext-ms-win-rtcore-gdi-object-l1-1-0.dll',
'ext-ms-win-rtcore-gdi-rgn-l1-1-0.dll',
'ext-ms-win-rtcore-ntuser-cursor-l1-1-0.dll',
'ext-ms-win-rtcore-ntuser-dc-access-l1-1-0.dll',
'ext-ms-win-rtcore-ntuser-synch-ext-l1-1-0.dll',
'ext-ms-win-rtcore-ntuser-syscolors-l1-1-0.dll',
'ext-ms-win-rtcore-ntuser-sysparams-l1-1-0.dll',
'ext-ms-win-rtcore-ntuser-window-ext-l1-1-0.dll',
'ext-ms-win-security-credui-l1-1-0.dll',
'ext-ms-win-session-usermgr-l1-1-0.dll',
'ext-ms-win-session-usertoken-l1-1-0.dll',
'ext-ms-win-session-winsta-l1-1-0.dll',
'ext-ms-win-session-wtsapi32-l1-1-0.dll',
'ext-ms-win-shell-shell32-l1-2-0.dll',
'ext-ms-win-shell-shell32-l1-2-1.dll',
'ext-ms-win-shell32-shellfolders-l1-1-0.dll',
'ext-ms-win-uxtheme-themes-l1-1-0.dll',
'ext-ms-win-wer-reporting-l1-1-0.dll',
'ext-ms-win-wevtapi-eventlog-l1-1-0.dll',
'ext_iccore73.dll',
'ext_iccore73d.dll',
'extensionsystem.dll',
'extensionsystem4.dll',
'f3biio.dll',
'f3bilpio.dll',
'f3biprct.dll',
'fastprox.dll',
'faultrep.dll',
'favorites.dll',
'fbclient.dll',
'fdframework.dll',
'fdrextensions.dll',
'fdrplugin.dll',
'ffmpeg.dll',
'ffmpegsumo.dll',
'ffwrapper.dll',
'fineobj.dll',
'firewallapi.dll',
'fltlib.dll',
'fmod.dll',
'fmod_event.dll',
'fmodex.dll',
'fontsub.dll',
'foundation.dll',
'framedynos.dll',
'framework.dll',
'frameworkextensions.dll',
'freecadapp.dll',
'freecadbase.dll',
'freecadgui.dll',
'freeimage.dll',
'freetype.dll',
'friextensions.dll',
'ftd2xx.dll',
'functiondialog.dll',
'fury3_mtc.dll',
'fwpuclnt.dll',
'gamedata.dll',
'gdacutl.dll',
'gdal204.dll',
'gemrb_core.dll',
'gemx_nvt73d.dll',
'gendata73d.dll',
'geom.dll',
'geometry.dll',
'gf.dll',
'gfsdk_aftermath_lib.x64.dll',
'gfx.dll',
'gimp-1.1.dll',
'glew32.dll',
'glib-2.0.dll',
'globals.dll',
'glog.dll',
'glu32.dll',
'glut32.dll',
'gmsecapi.dll',
'googledesktopcommon.dll',
'gpu_solver.dll',
'granny2.dll',
'graphic.dll',
'graphicdata.dll',
'graphics.dll',
'groovenew.dll',
'grooveutil.dll',
'gsdmain.dll',
'gsiou.dll',
'gsl73.dll',
'gsl73d.dll',
'gsroot.dll',
'gufuncs.dll',
'gui.dll',
'gui_oiv.dll',
'guibase.dll',
'guicore.dll',
'happy.dll',
'hccutils.dll',
'hdf5.dll',
'hid.dll',
'hlink.dll',
'hlog.dll',
'hpr.dll',
'htmlayout.dll',
'httpapi.dll',
'i3core-w64r-20-1.dll',
'i18nisolang1msc.dll',
'i18nlangtag.dll',
'ibmtss.dll',
'icdbif73.dll',
'icdbif73d.dll',
'icmain73.dll',
'icmain73d.dll',
'icmp.dll',
'iconv.dll',
'icsarith.dll',
'icsform.dll',
'icsgenl.dll',
'icudt55.dll',
'icudt56.dll',
'icuin55.dll',
'icuin56.dll',
'icuin63.dll',
'icuin64.dll',
'icuin65.dll',
'icuio63.dll',
'icuio64.dll',
'icuio65.dll',
'icuserc73.dll',
'icuserc73d.dll',
'icutu43.dll',
'icuuc40.dll',
'icuuc43.dll',
'icuuc55.dll',
'icuuc56.dll',
'icuuc58.dll',
'icuuc63.dll',
'icuuc64.dll',
'icuuc65.dll',
'ieadvpack.dll',
'ieframe.dll',
'iertutil.dll',
'ieshims.dll',
'ieui.dll',
'igx.dll',
'ihszras.dll',
'ijl15.dll',
'iliuni32.dll',
'image.dll',
'imagerenderer.dll',
'imgutil.dll',
'imm32.dll',
'imsconnect.dll',
'inetcomm.dll',
'instapi110.dll',
'intl.dll',
'inxs.dll',
'iocptcp.dll',
'iocpudp.dll',
'iolibu.dll',
'iopersisteddataaccess.dll',
'ipc.dll',
'irisctl.dll',
'isxutils.dll',
'itaxsub.dll',
'iup.dll',
'iuplua52.dll',
'j9thr26.dll',
'jansson.dll',
'java.dll',
'jawt.dll',
'jbase5.dll',
'jdproxy.dll',
'js0fm.dll',
'js0group.dll',
'js32.dll',
'jsoncpp.dll',
'juli.dll',
'jvm.dll',
'kdownload.dll',
'kelib.dll',
'kernel.dll',
'kernelbase.dll',
'kernelutil.dll',
'kf5configcore.dll',
'kf5coreaddons.dll',
'kf5i18n.dll',
'kf5kiocore.dll',
'kf5purpose.dll',
'knowledgeitf.dll',
'kool_ade.dll',
'kpathsea.dll',
'kpathsea600.dll',
'kpathsea630.dll',
'kpathsea630w64.dll',
'ksffoundation.dll',
'kso.dll',
'ksolite.dll',
'ktmw32.dll',
'ku_http.dll',
'kwdatadef.dll',
'kwlib.dll',
'kwlog.dll',
'kwmodconfig.dll',
'kwmusiccore.dll',
'labcontrols.dll',
'labgen.dll',
'labutils.dll',
'language.dll',
'languages.dll',
'lay100.dll',
'lay200.dll',
'ldap60.dll',
'lgpllibs.dll',
'lib.io.char.dll',
'lib.stdc.dll',
'lib.syslog.dll',
'libabiword-3.0.dll',
'libadm_core6.dll',
'libadm_coreaudio6.dll',
'libadm_coreaudioencoder6.dll',
'libadm_coredemuxer6.dll',
'libadm_coreimage6.dll',
'libadm_coremuxer6.dll',
'libadm_coreui6.dll',
'libadm_coreutils6.dll',
'libadm_corevideoencoder6.dll',
'libadm_corevideofilter6.dll',
'libadm_uiqt56.dll',
'libapr-1.dll',
'libapriconv-1-0.dll',
'libapriconv-1.dll',
'libaprutil-1.dll',
'libasapmgr-mingw.dll',
'libatk-1.0-0.dll',
'libbabl-0.1-0.dll',
'libbase_utils.dll',
'libbind9.dll',
'libboost_filesystem-mt.dll',
'libboost_system-mt.dll',
'libboost_thread_win32-mt.dll',
'libbz2-1.dll',
'libcairo-2.dll',
'libcamel-1.2-19.dll',
'libcapstone.dll',
'libcasadi.dll',
'libcef.dll',
'libcocos2d.dll',
'libcrypto-1_1-x64.dll',
'libcrypto-1_1.dll',
'libcrypto10.dll',
'libcurl-4.dll',
'libcurl.dll',
'libcxxwrap_julia.dll',
'libdarktable.dll',
'libdns.dll',
'libeay32.dll',
'libecore-1.dll',
'libedataserver-1.2-14.dll',
'libedataserverui-1.2-11.dll',
'libegl.dll',
'libeina-1.dll',
'libenchant.dll',
'libeutil-0.dll',
'libevas-1.dll',
'libexpat-1.dll',
'libexpat.dll',
'libfbxsdk.dll',
'libfclasses.dll',
'libfontconfig-1.dll',
'libfreetype-6.dll',
'libgcc_s_dw2-1.dll',
'libgcc_s_seh-1.dll',
'libgcc_s_sjlj-1.dll',
'libgcrypt-20.dll',
'libgdk-3-0.dll',
'libgdk-win32-2.0-0.dll',
'libgdk_pixbuf-2.0-0.dll',
'libgegl-0.2-0.dll',
'libgegl-0.3-0.dll',
'libgegl-0.4-0.dll',
'libgfortran-3.dll',
'libgimp-2.0-0.dll',
'libgimpbase-2.0-0.dll',
'libgimpcolor-2.0-0.dll',
'libgimpui-2.0-0.dll',
'libgimpwidgets-2.0-0.dll',
'libgio-2.0-0.dll',
'libglesv2.dll',
'libglib-2.0-0.dll',
'libglibmm-2.4-1.dll',
'libgmodule-2.0-0.dll',
'libgmp-10.dll',
'libgnutls-30.dll',
'libgobject-2.0-0.dll',
'libgomp-1.dll',
'libgpac.dll',
'libgpg-error-0.dll',
'libgphoto2-6.dll',
'libgphoto2_port-12.dll',
'libgraphicsmagick-3.dll',
'libgrass_dbmibase.7.7.dll',
'libgrass_gis.7.4.0.dll',
'libgrass_gis.7.4.4.dll',
'libgrass_gis.7.6.0.dll',
'libgrass_gis.7.6.dll',
'libgrass_gis.7.7.dll',
'libgrass_raster.7.4.0.dll',
'libgrass_raster.7.7.dll',
'libgrass_vector.7.7.dll',
'libgstaudio-1.0-0.dll',
'libgstbase-0.10-0.dll',
'libgstbase-1.0-0.dll',
'libgstbase-1.5-0.dll',
'libgstpbutils-1.0-0.dll',
'libgstreamer-0.10-0.dll',
'libgstreamer-1.0-0.dll',
'libgstreamer-1.5-0.dll',
'libgsttag-1.0-0.dll',
'libgstvideo-1.0-0.dll',
'libgtk-3-0.dll',
'libgtk-win32-2.0-0.dll',
'libguide40.dll',
'libgwyapp2-0.dll',
'libgwyddion2-0.dll',
'libgwymodule2-0.dll',
'libgwyprocess2-0.dll',
'libharfbuzz-0.dll',
'libhpdf_140.dll',
'libhttpd.dll',
'libiconv-2.dll',
'libifcoremd.dll',
'libifportmd.dll',
'libindexing.dll',
'libintl-8.dll',
'libintl.dll',
'libiomp5md.dll',
'libisc.dll',
'libisccfg.dll',
'libite-mingw.dll',
'libitkcommon-4.13.dll',
'libitkioimagebase-4.13.dll',
'libitksys-4.13.dll',
'libitkvnl-4.13.dll',
'libjam.dll',
'libjpeg-8.dll',
'libjpeg-62.dll',
'libkdecore.dll',
'libkdeui.dll',
'libkdevplatforminterfaces.dll',
'libkf5configcore.dll',
'libkf5coreaddons.dll',
'libkf5i18n.dll',
'libkf5widgetsaddons.dll',
'libkio.dll',
'libkritacommand.dll',
'libkritaflake.dll',
'libkritaglobal.dll',
'libkritaimage.dll',
'libkritapigment.dll',
'libkritaui.dll',
'libkritawidgets.dll',
'libkritawidgetutils.dll',
'libldns-2.dll',
'liblogicalaccess.dll',
'liblzo2-2.dll',
'libmagickcore-7.q16hdri-5.dll',
'libmagickcore-7.q16hdri-6.dll',
'libmex.dll',
'libmitsuba-core.dll',
'libmitsuba-render.dll',
'libmlt-6.dll',
'libmmd.dll',
'libmpr.dll',
'libmwfl.dll',
'libmwi18n.dll',
'libmwservices.dll',
'libmx.dll',
'libmysql.dll',
'libnbbase.dll',
'libnettle-6.dll',
'libnspr4.dll',
'libnwcore.dll',
'libnwshared.dll',
'libopenblas.dll',
'libopencv_calib3d341.dll',
'libopencv_calib3d342.dll',
'libopencv_core340.dll',
'libopencv_core341.dll',
'libopencv_core342.dll',
'libopencv_core343.dll',
'libopencv_core400.dll',
'libopencv_core401.dll',
'libopencv_core411.dll',
'libopencv_datasets341.dll',
'libopencv_datasets342.dll',
'libopencv_features2d342.dll',
'libopencv_highgui340.dll',
'libopencv_highgui341.dll',
'libopencv_highgui342.dll',
'libopencv_highgui343.dll',
'libopencv_highgui400.dll',
'libopencv_highgui401.dll',
'libopencv_highgui411.dll',
'libopencv_imgcodecs341.dll',
'libopencv_imgcodecs342.dll',
'libopencv_imgcodecs343.dll',
'libopencv_imgcodecs400.dll',
'libopencv_imgcodecs401.dll',
'libopencv_imgcodecs411.dll',
'libopencv_imgproc341.dll',
'libopencv_imgproc342.dll',
'libopencv_imgproc343.dll',
'libopencv_imgproc400.dll',
'libopencv_imgproc401.dll',
'libopencv_imgproc411.dll',
'libopencv_video342.dll',
'libopencv_videoio342.dll',
'libopencv_videoio343.dll',
'libopenthreads.dll',
'libopenthreadsd.dll',
'liborg_blueberry_osgi.dll',
'liborg_blueberry_ui.dll',
'libosg.dll',
'libosgd.dll',
'libosgdb.dll',
'libosgdbd.dll',
'libosgearth.dll',
'libosgearthd.dll',
'libosgearthutil.dll',
'libosgga.dll',
'libosggad.dll',
'libosgtext.dll',
'libosgutil.dll',
'libosgutild.dll',
'libosgviewer.dll',
'libosgviewerd.dll',
'libosgwidget.dll',
'libp3dtool.dll',
'libp3dtoolconfig.dll',
'libpal.dll',
'libpanda.dll',
'libpandaegg.dll',
'libpandaexpress.dll',
'libpango-1.0-0.dll',
'libpangocairo-1.0-0.dll',
'libpart.dll',
'libpcre-1.dll',
'libpcre.dll',
'libpidgin.dll',
'libpixman-1-0.dll',
'libplc4.dll',
'libplds4.dll',
'libpng16-16.dll',
'libpng16.dll',
'libpom.dll',
'libportability.dll',
'libpq.dll',
'libprotobuf.dll',
'libpulsecommon-7.1.dll',
'libpulsecore-7.1.dll',
'libpurple.dll',
'libpython2.7.dll',
'libpython3.6m.dll',
'libpython3.7m.dll',
'libpython3.8m.dll',
'libqt5core.dll',
'libqt5gui.dll',
'libquadmath-0.dll',
'librazorcat-mingw.dll',
'libredwg-0.dll',
'libreq.dll',
'librudiments.dll',
'libsasl.dll',
'libsndfile-1.dll',
'libsnooper.dll',
'libsodium.dll',
'libspreadsheet-1-10-17.dll',
'libsqlite3-0.dll',
'libsqlite3.dll',
'libssh2.dll',
'libssl-1_1-x64.dll',
'libssl-1_1.dll',
'libssp-0.dll',
'libstdc++-6.dll',
'libsti32.dll',
'libsvn_delta-1.dll',
'libsvn_subr-1.dll',
'libswipl.dll',
'libsyss.dll',
'libsystre-0.dll',
'libtc.dll',
'libtccore.dll',
'libtcinit.dll',
'libtcod.dll',
'libtiff-5.dll',
'libtitania.dll',
'libtulip-core-4.8.dll',
'libtulip-core-4.9.dll',
'libtulip-core-4.10.dll',
'libtulip-core-5.0.dll',
'libtulip-core-5.2.dll',
'libtulip-core-5.3.dll',
'libtulip-gui-4.8.dll',
'libtulip-ogl-4.8.dll',
'libtulip-ogl-5.2.dll',
'libtypetable-mingw.dll',
'libugutils.dll',
'libunity_core_shared.dll',
'libusb-1.0.dll',
'libusb0.dll',
'libut.dll',
'libuv.dll',
'libvlc.dll',
'libvlccore.dll',
'libvorbisfile.dll',
'libvorbisfile_64.dll',
'libvtkcommoncore-8.1.dll',
'libvtkcommoncorepython27d-8.1.dll',
'libvtkcommondatamodel-8.1.dll',
'libvtkcommondatamodelpython27d-8.1.dll',
'libvtkcommonexecutionmodel-8.1.dll',
'libvtkcommonexecutionmodelpython27d-8.1.dll',
'libvtkcommonmath-8.1.dll',
'libvtkcommonmisc-8.1.dll',
'libvtkcommonsystem-8.1.dll',
'libvtkcommontransforms-8.1.dll',
'libvtkfilterscore-8.1.dll',
'libvtkfiltersgeneral-8.1.dll',
'libvtkfilterssources-8.1.dll',
'libvtkimagingcore-8.1.dll',
'libvtkiocore-8.1.dll',
'libvtkrenderingcore-8.1.dll',
'libvtkrenderingcorepython27d-8.1.dll',
'libvtksys-8.1.dll',
'libvtkwrappingpython27core-8.1.dll',
'libwinpthread-1.dll',
'libwireshark.dll',
'libwsutil.dll',
'libxl.dll',
'libxmccore.dll',
'libxml-mingw.dll',
'libxml2-2.dll',
'libxml2.dll',
'libxmlsec.dll',
'libxslt.dll',
'license.dll',
'lima-common-data.dll',
'lima-common-factory.dll',
'lima-common-fsaaccess.dll',
'lima-common-mediaprocessors.dll',
'lima-common-mediaticdata.dll',
'lima-common-misc.dll',
'lima-common-processunitframework.dll',
'lima-common-time.dll',
'lima-common-tools.dll',
'lima-common-xmlconfigurationfiles.dll',
'lima-lp-analysisdict.dll',
'lima-lp-analysishandlers.dll',
'lima-lp-annotationgraph.dll',
'lima-lp-automaton.dll',
'lima-lp-bagofwords.dll',
'lima-lp-client.dll',
'lima-lp-flattokenizer.dll',
'lima-lp-lineartextrepresentation.dll',
'lima-lp-linguisticanalysisstructure.dll',
'lima-lp-linguisticdata.dll',
'lima-lp-linguisticprocessors.dll',
'lima-lp-linguisticresources.dll',
'lima-lp-misc.dll',
'lima-lp-propertycode.dll',
'lima-lp-specificentities.dll',
'lima-lp-syntacticanalysis.dll',
'lima-lp-textsegmentation.dll',
'linkinfo.dll',
'livelog.dll',
'locale.dll',
'log.dll',
'log4cplus.dll',
'log4cplusd.dll',
'log4cxx.dll',
'logger.dll',
'logging.dll',
'loggingplatform.dll',
'logmanager.dll',
'logoncli.dll',
'ltkrn14n.dll',
'ltkrn15u.dll',
'ltkrnu.dll',
'lua.dll',
'lua5.1.dll',
'lua51.dll',
'lua53.dll',
'lw.dll',
'lxgui.dll',
'mahrctl.dll',
'mahrtom.dll',
'mahruser.dll',
'mapi32.dll',
'marbase.dll',
'math.dll',
'maxapi.dll',
'maxutil.dll',
'mcereghandler.dll',
'mdaelib.dll',
'mediafoundation.dll',
'mediainfo.dll',
'mediatypes.dll',
'memory.dll',
'mergedlo.dll',
'mesh.dll',
'message.dll',
'messagepublisher.dll',
'mf.dll',
'mfc42d.dll',
'mfc70.dll',
'mfc70u.dll',
'mfc71.dll',
'mfc71u.dll',
'mfc80.dll',
'mfc80u.dll',
'mfc80xu.dll',
'mfc90.dll',
'mfc90u.dll',
'mfc100.dll',
'mfc100u.dll',
'mfc110.dll',
'mfc110u.dll',
'mfc120.dll',
'mfc120u.dll',
'mfc140.dll',
'mfc140d.dll',
'mfc140u.dll',
'mfc140ud.dll',
'mfcce300.dll',
'mfco42d.dll',
'mfplat.dll',
'mfplay.dll',
'mfreadwrite.dll',
'mgmtapi.dll',
'mi.dll',
'miktex209-app.dll',
'miktex209-core.dll',
'miktex209-next-app.dll',
'miktex209-next-core.dll',
'miktex209-next-util.dll',
'miktex209-util.dll',
'mingwm10.dll',
'minizip.dll',
'misc.dll',
'mitkcoreext.dll',
'mkl_rt.dll',
'mlang.dll',
'mmdevapi.dll',
'mmfs2.dll',
'mmmsgbundle.dll',
'mobilephoneenv.dll',
'module_lifetime.dll',
'module_structures.dll',
'mosaiccore.dll',
'mosifs32.dll',
'movaviio.dll',
'mozalloc.dll',
'mozavutil.dll',
'mozglue.dll',
'mqtutil.dll',
'mqutil.dll',
'mrmcorer.dll',
'mrt100_app.dll',
'mrtrix-365b606c17c8f6f68f476f1f567a184afcddcb5c.dll',
'msasn1.dll',
'mscoree.dll',
'msdart.dll',
'msdmo.dll',
'msdrm.dll',
'msfeeds.dll',
'mshtml.dll',
'msi.dll',
'msiltcfg.dll',
'msl_all-dll90_x86.dll',
'msls31.dll',
'mso.dll',
'mso20imm.dll',
'mso20win32client.dll',
'mso30imm.dll',
'mso30win32client.dll',
'mso40uiwin32client.dll',
'mso97.dll',
'mso98win32client.dll',
'mso99lwin32client.dll',
'msocf.dll',
'mss32.dll',
'mstores.dll',
'msvbvm50.dll',
'msvcp70.dll',
'msvcp71.dll',
'msvcp90.dll',
'msvcp90d.dll',
'msvcp100.dll',
'msvcp100d.dll',
'msvcp110.dll',
'msvcp110_win.dll',
'msvcp110d.dll',
'msvcp120.dll',
'msvcp120_app.dll',
'msvcp120d.dll',
'msvcp120d_app.dll',
'msvcp140.dll',
'msvcp140_app.dll',
'msvcp140d.dll',
'msvcp140d_app.dll',
'msvcp_win.dll',
'msvcr71.dll',
'msvcr71d.dll',
'msvcr80d.dll',
'msvcr90.dll',
'msvcr90d.dll',
'msvcr100.dll',
'msvcr100d.dll',
'msvcr110.dll',
'msvcr110d.dll',
'msvcr120.dll',
'msvcr120_app.dll',
'msvcr120_clr0400.dll',
'msvcr120d.dll',
'msvcrt-ruby240.dll',
'msvcrt-ruby250.dll',
'msvcrt-ruby260.dll',
'msvcrt20.dll',
'msvcrtd.dll',
'msys-2.0.dll',
'msys-gcc_s-1.dll',
'msys-iconv-2.dll',
'msys-intl-8.dll',
'msys-python3.6m.dll',
'msys-ruby240.dll',
'msys-ruby260.dll',
'msys-svn_subr-1-0.dll',
'msys-z.dll',
'msys-znc-1.7.dll',
'mtslib.dll',
'mvcl14n.dll',
'mycom.dll',
'nagscreen.dll',
'namedobjects.dll',
'ncaaudiodev.dll',
'ncobjapi.dll',
'ncrypt.dll',
'ndfapi.dll',
'net.dll',
'netbw32.dll',
'netrap.dll',
'netutils.dll',
'network.dll',
'newdev.dll',
'nghttp2.dll',
'ninput.dll',
'nitroplatform12.dll',
'nitroplatform13.dll',
'nlsreportgenerator4807.dll',
'nnotes.dll',
'node.dll',
'normaliz.dll',
'npdf.dll',
'npparstb.dll',
'npremodule.dll',
'nscp_protobuf.dll',
'nsi.dll',
'nss3.dll',
'nssutil3.dll',
'nsw2lib.dll',
'ntdsapi.dll',
'ntshrui.dll',
'nuke83.dll',
'nvcloth_x64.dll',
'nvcuda.dll',
'nw_elf.dll',
'o3pubfunc.dll',
'oart.dll',
'oartodf.dll',
'objc.dll',
'objectmodelersystem.dll',
'obs.dll',
'occache.dll',
'oci.dll',
'odbc32.dll',
'odbccp32.dll',
'og81as.dll',
'og701asuc.dll',
'ogg.dll',
'oglmanager.dll',
'ogremain.dll',
'ogshell.dll',
'ogutil.dll',
'oisapp.dll',
'olepro32.dll',
'olmapi32.dll',
'opa.dll',
'openal32.dll',
'opencl.dll',
'openflipperpluginlib.dll',
'opengl32.dll',
'openglswitcherapi.dll',
'openmaya.dll',
'openmayaanim.dll',
'openvr_api.dll',
'oplib.dll',
'os.dll',
'osf.dll',
'osfshared.dll',
'osfui.dll',
'osii_messaging_c.dll',
'osii_sarc_c.dll',
'osii_system_c.dll',
'osppc.dll',
'osppcext.dll',
'otgdllview.dll',
'outlrpc.dll',
'owl52f.dll',
'packet.dll',
'palsstorage.dll',
'paramblk2.dll',
'parto.dll',
'pastatus.dll',
'pbvm90.dll',
'pbvm125.dll',
'pbvm170.dll',
'pcre.dll',
'pcwum.dll',
'pd.dll',
'pdfcore.dll',
'pdh.dll',
'perl528.dll',
'pgort140.dll',
'php5ts.dll',
'php7.dll',
'php7ts.dll',
'php8.dll',
'php8ts.dll',
'physx3_x64.dll',
'physx3_x86.dll',
'physx3characterkinematicdebug_x86.dll',
'physx3common_x64.dll',
'physx3common_x86.dll',
'physx3cooking_x64.dll',
'physxloader.dll',
'pig327.dll',
'plds4.dll',
'plib32.dll',
'pmruntime.dll',
'pncrt.dll',
'pocofoundation.dll',
'policymanager.dll',
'postproc-55.dll',
'powrprof.dll',
'ppcore.dll',
'pr0ctls.dll',
'pr0defs.dll',
'pr0rcci.dll',
'prgbase.dll',
'prgcore.dll',
'prldap60.dll',
'prm.dll',
'prntvpt.dll',
'procint.dll',
'procommon.dll',
'productutilities.dll',
'profapi.dll',
'progreso.dll',
'progressivehullcudalib.dll',
'projectexplorer4.dll',
'propsys.dll',
'protocol.dll',
'pslutils.dll',
'psplog.dll',
'psxlib.dll',
'pthread.dll',
'pthreadgc2.dll',
'pthreadvc2.dll',
'ptimer.dll',
'ptrace34.dll',
'public.dll',
'pubsub.dll',
'pxfoundation_x64.dll',
'pxpvdsdk_x64.dll',
'pyside2.abi3.dll',
'python3.dll',
'python23.dll',
'python24.dll',
'python25.dll',
'python26.dll',
'python27.dll',
'python32.dll',
'python33.dll',
'python34.dll',
'python35.dll',
'python36.dll',
'python37.dll',
'python38.dll',
'python39.dll',
'pywintypes27.dll',
'pywintypes39.dll',
'qbutilities.dll',
'qinetwork.dll',
'qiutils.dll',
'qoom19_mtc.dll',
'qqlivebase.dll',
'qqmusiccommon.dll',
'qt5concurrent.dll',
'qt5core.dll',
'qt5cored.dll',
'qt5dbus.dll',
'qt5designer.dll',
'qt5gui.dll',
'qt5guid.dll',
'qt5help.dll',
'qt5location.dll',
'qt5multimedia.dll',
'qt5multimediad.dll',
'qt5multimediawidgets.dll',
'qt5network.dll',
'qt5networkd.dll',
'qt5opengl.dll',
'qt5positioning.dll',
'qt5positioningd.dll',
'qt5printsupport.dll',
'qt5qml.dll',
'qt5qmld.dll',
'qt5quick.dll',
'qt5quickcontrols2.dll',
'qt5quickd.dll',
'qt5quicktemplates2.dll',
'qt5quickwidgets.dll',
'qt5script.dll',
'qt5sensors.dll',
'qt5serialport.dll',
'qt5sql.dll',
'qt5svg.dll',
'qt5test.dll',
'qt5webchannel.dll',
'qt5webengine.dll',
'qt5webenginecore.dll',
'qt5webenginewidgets.dll',
'qt5webkit.dll',
'qt5webkitwidgets.dll',
'qt5websockets.dll',
'qt5widgets.dll',
'qt5widgetsd.dll',
'qt5winextras.dll',
'qt5xml.dll',
'qt5xmlpatterns.dll',
'qt53dcore.dll',
'qt53dcored.dll',
'qt53dinput.dll',
'qt53drender.dll',
'qt53drenderd.dll',
'qtcore4.dll',
'qtcored4.dll',
'qtdbus4.dll',
'qtdeclarative4.dll',
'qtgui4.dll',
'qtguid4.dll',
'qtintf70.dll',
'qtnetwork4.dll',
'qtopengl4.dll',
'qtscript4.dll',
'qtsql4.dll',
'qtsupport4.dll',
'qtsvg4.dll',
'qtwebkit4.dll',
'qtxml4.dll',
'qtxmlpatterns4.dll',
'quazip.dll',
'quest3_mtc.dll',
'qwave.dll',
'qwutil.dll',
'r.dll',
'rainmeter.dll',
'rapi.dll',
'rasdlg.dll',
'rblas.dll',
'rbsha.dll',
'rdctcpip.dll',
'rds32.dll',
'react-native-win32.dll',
'reagent.dll',
'records.dll',
'resultpage.dll',
'rlapack.dll',
'rmclient.dll',
'role3d.dll',
'room616_mtc.dll',
'rpcrt4.dll',
'rps32.dll',
'rstrtmgr.dll',
'rtutils.dll',
'rvcore.dll',
'rxffr.dll',
'rxruntim.dll',
'sal3.dll',
'salhelper3msc.dll',
'samcli.dll',
'sandboxbroker.dll',
'saphirdll.dll',
'sbiedll.dll',
'scculib.dll',
'sccut.dll',
'scesrv.dll',
'schedularnet.dll',
'scint.dll',
'scnpst32.dll',
'scnpst64.dll',
'scnpst64c.dll',
'sdl.dll',
'sdl2.dll',
'sdl2_gfx.dll',
'sdl2_image.dll',
'sdl2_mixer.dll',
'sdl2_ttf.dll',
'sdl_image.dll',
'sdl_mixer.dll',
'sdl_ttf.dll',
'secdb.dll',
'secur32.dll',
'security.dll',
'sensapi.dll',
'serialization.dll',
'settings.dll',
'setupapi.dll',
'setupengine.dll',
'sginfra.dll',
'shared.dll',
'sharedlibrary.dll',
'sharedu.dll',
'shcore.dll',
'shiboken2.abi3.dll',
'sibus.dll',
'sicl32.dll',
'siclrpc.dll',
'sicollection.dll',
'sicomm.dll',
'sidatadesc.dll',
'siexception.dll',
'sirow.dll',
'sirulereturn.dll',
'sistruct.dll',
'skywlib.dll',
'smime3.dll',
'smrt32.dll',
'sndvolsso.dll',
'so.5.5.23.dll',
'so.5.5.24.1.dll',
'so.5.5.24.2.dll',
'so.5.5.24.dll',
'softwareupdatefiles.dll',
'sos.dll',
'sotmi.dll',
'spaacis.dll',
'spbasic.dll',
'speedtreert.dll',
'spell32.dll',
'splutilities.dll',
'spoolss.dll',
'spp.dll',
'sql.dll',
'sqlite.dll',
'sqlite3.dll',
'sqmapi.dll',
'srpapi.dll',
'srvcli.dll',
'ssl3.dll',
'ssleay32.dll',
'ssm_api.dll',
'sspicli.dll',
'statsjunkysystem.dll',
'stddll32.dll',
'stddll40.dll',
'steam_api.dll',
'steam_api64.dll',
'sti.dll',
'stlpmt45.dll',
'stlport-w64r-20-1.dll',
'stlport.5.1.dll',
'stlport.5.2.dll',
'stlport_vc7145.dll',
'storm.dll',
'studyrunner.dll',
'sub_ctrl.dll',
'svc.binary.dll',
'svc.filesys.dll',
'svc.node.dll',
'svlmi.dll',
'svml_dispmd.dll',
'svt.dll',
'svtmi.dll',
'swipl.dll',
'swresample-1.dll',
'swresample-2.dll',
'swresample-3.dll',
'swscale-0.dll',
'swscale-2.dll',
'swscale-3.dll',
'swscale-4.dll',
'swscale-5.dll',
'systask.dll',
'system.dll',
'systemeventsbrokerclient.dll',
'systemutilities.dll',
't2embed.dll',
'tabsys.dll',
'taminstance.dll',
'tasks_core.dll',
'tbb.dll',
'tbbmalloc.dll',
'tbs.dll',
'tbstdobjs.dll',
'td_alloc_4.03_14.dll',
'td_alloc_20.6_15.dll',
'td_alloc_20.8_14.dll',
'td_dbcore_20.6_15.dll',
'td_ge_20.6_15.dll',
'td_root.dll',
'td_root_4.03_14.dll',
'td_root_20.8_14.dll',
'tdh.dll',
'tdl100.dll',
'tdl200.dll',
'tdl201.dll',
'tdl210.dll',
'tdl290.dll',
'telemetry.dll',
'ter32.dll',
'texteditor.dll',
'texteditor4.dll',
'textomsg.dll',
'tgp_monitor.dll',
'thrdutil.dll',
'tier0.dll',
'tier0_s.dll',
'tinyxml.dll',
'tkbrep.dll',
'tkernel.dll',
'tkg2d.dll',
'tkg3d.dll',
'tkgeomalgo.dll',
'tkgeombase.dll',
'tkmath.dll',
'tkmi.dll',
'tkshhealing.dll',
'tktopalgo.dll',
'tl.dll',
'tllo.dll',
'tlmi.dll',
'tmfoundation_gt2_release.dll',
'tmgeneral_release.dll',
'tmmetadata_release.dll',
'tmpdlg10.dll',
'tmpublic_algorithmframework_release.dll',
'tmpublic_core_release.dll',
'tmpublic_maskstorage_release.dll',
'tmpublic_math_release.dll',
'tmpublic_serialization_release.dll',
'tmpublic_voxelstorage_release.dll',
'tn3dls.dll',
'tools.dll',
'toom_mtc.dll',
'tp.dll',
'trace.dll',
'tracelog-4-0.dll',
'tracker.dll',
'traffic.dll',
'trayicon2.dll',
'truss100.dll',
'truss200.dll',
'tsduck.dll',
'twinapi.appcore.dll',
'tx_log.dll',
'txinterf.dll',
'txmlutil.dll',
'types16_mtc.dll',
'u.dll',
'u32base.dll',
'u32comm.dll',
'ubs_database.dll',
'ubs_datetime.dll',
'ubs_dbunidesys.dll',
'ubs_error.dll',
'ubs_evlog.dll',
'ubs_objects.dll',
'ubs_trace.dll',
'ubs_varios.dll',
'ucbhelper4msc.dll',
'ucrtbased.dll',
'ue4editor-core.dll',
'ue4editor-coreuobject.dll',
'ue4editor-engine.dll',
'ue4editor-inputcore.dll',
'ue4editor-slate.dll',
'ue4editor-slatecore.dll',
'ue4editor-unrealed.dll',
'ue4editor-unrealenginepython.dll',
'ui-service-provider.dll',
'ui.dll',
'uiautomationcore.dll',
'umpdc.dll',
'umpnpmgr.dll',
'unbcl.dll',
'uniansi.dll',
'unicode.dll',
'units.dll',
'unityplayer.dll',
'unrar.dll',
'url.dll',
'userdata.dll',
'userdatatypehelperutil.dll',
'usevtlog.dll',
'usp10.dll',
'usprfl2d.dll',
'util.dll',
'utildebuglog.dll',
'utilities.dll',
'utility.dll',
'utilmsgbuffer.dll',
'utils.dll',
'utils4.dll',
'utl.dll',
'utlmi.dll',
'uwinapi.dll',
'vaultcli.dll',
'vb40032.dll',
'vboxrt.dll',
'vccorlib140.dll',
'vccorlib140_app.dll',
'vcl.dll',
'vclmi.dll',
'vcomp90.dll',
'vcomp100.dll',
'vcomp120.dll',
'vcomp140.dll',
'vcomp140d.dll',
'vcruntime140.dll',
'vcruntime140_1.dll',
'vcruntime140_app.dll',
'vcruntime140d.dll',
'vcruntime140d_app.dll',
'vctl.dll',
'vdp_rdpvcbridge.dll',
'videoframe.dll',
'virtdisk.dll',
'visitcommon.dll',
'visusdataflow.dll',
'visusdb.dll',
'visusgui.dll',
'visusidx.dll',
'visuskernel.dll',
'vmprotectsdk32.dll',
'vo28gui.dll',
'vo28inet.dll',
'vo28ole.dll',
'vo28run.dll',
'vo28sys.dll',
'vocon3200_asr.dll',
'vocon3200_base.dll',
'vorbisfile.dll',
'vos3msc.dll',
'vproc2.dll',
'vsansi.dll',
'vsstrace.dll',
'vstdlib.dll',
'vstdlib_s.dll',
'vtkcommoncore-8.1.dll',
'vtkcommoncore-pv5.5.dll',
'vtkcommoncore-pv5.7.dll',
'vtkcommondatamodel-8.1.dll',
'vtools2.dll',
'vulkan-1.dll',
'w3btrv7.dll',
'wbtrv32.dll',
'wdscore.dll',
'we60x.dll',
'webres.dll',
'webservices.dll',
'wer.dll',
'wevtapi.dll',
'wfapi.dll',
'wglogin.dll',
'wimgapi.dll',
'win32msgqueue.dll',
'win32u.dll',
'winamp.dll',
'winbrand.dll',
'wincorlib.dll',
'windos95.dll',
'wingenfn.dll',
'winnsi.dll',
'winrtmex.dll',
'winsock.dll',
'winsparkle.dll',
'wintax.dll',
'wintrust.dll',
'winusb.dll',
'wiretap-1.11.0.dll',
'wkscli.dll',
'wlanapi.dll',
'wlanutil.dll',
'wldp.dll',
'wmiclnt.dll',
'wmsgapi.dll',
'wow32.dll',
'wow64.dll',
'wpcap.dll',
'wrapper.dll',
'wrs_module1.dll',
'ws2.dll',
'ws_log.dll',
'wsnmp32.dll',
'wt.dll',
'wthttp.dll',
'wvcore.dll',
'wwutils.dll',
'wxbase30u_gcc_custom.dll',
'wxmsw30u_core_gcc_custom.dll',
'x3daudio1_7.dll',
'x64-msvcrt-ruby240.dll',
'x64-msvcrt-ruby250.dll',
'x64-msvcrt-ruby260.dll',
'x_funms.dll',
'x_pdfms.dll',
'xapofx1_5.dll',
'xbtbase1.dll',
'xbtbase2.dll',
'xcdcpr.dll',
'xcdcprgeometrysources.dll',
'xcdfields.dll',
'xcdimaging.dll',
'xcdmath3d.dll',
'xcdmesh.dll',
'xcdutility.dll',
'xerces-c_2_6.dll',
'xerces-c_2_7.dll',
'xerces-c_3_1.dll',
'xgraphic32.dll',
'ximage.dll',
'xinput1_3.dll',
'xlluaruntime.dll',
'xmllite.dll',
'xpcom_core.dll',
'xppdbgc.dll',
'xppdui.dll',
'xpprt1.dll',
'xppsys.dll',
'xppui2.dll',
'xrcore.dll',
'xtpro.dll',
'xul.dll',
'yaml.dll',
'zip.dll',
'zlib.dll',
'zlib1.dll',
'zlibwapi.dll',
'zoom32.dll',
'ztdata.dll',
'ztframe.dll',
])
WIN7_DLLS = frozenset([
'aaclient.dll',
'accessibilitycpl.dll',
'acctres.dll',
'acledit.dll',
'aclui.dll',
'acppage.dll',
'acproxy.dll',
'actioncenter.dll',
'actioncentercpl.dll',
'actionqueue.dll',
'actxprxy.dll',
'admparse.dll',
'admtmpl.dll',
'adprovider.dll',
'adsldp.dll',
'adsldpc.dll',
'adsmsext.dll',
'adsnt.dll',
'adtschema.dll',
'advapi32.dll',
'advpack.dll',
'aecache.dll',
'aeevts.dll',
'aeinv.dll',
'aelupsvc.dll',
'aepdu.dll',
'alttab.dll',
'amstream.dll',
'amxread.dll',
'apds.dll',
'api-ms-win-core-xstate-l1-1-0.dll',
'apilogen.dll',
'apircl.dll',
'apisetschema.dll',
'apphlpdm.dll',
'appidapi.dll',
'appidpolicyengineapi.dll',
'appidsvc.dll',
'appinfo.dll',
'appmgmts.dll',
'appmgr.dll',
'apss.dll',
'asferror.dll',
'asycfilt.dll',
'atl.dll',
'audiodev.dll',
'audioeng.dll',
'audiokse.dll',
'audioses.dll',
'audiosrv.dll',
'auditcse.dll',
'auditnativesnapin.dll',
'auditpolicygpinterop.dll',
'auditpolmsg.dll',
'authfwcfg.dll',
'authfwgp.dll',
'authfwsnapin.dll',
'authfwwizfwk.dll',
'authui.dll',
'autoplay.dll',
'auxiliarydisplayapi.dll',
'auxiliarydisplayclassinstaller.dll',
'auxiliarydisplaycpl.dll',
'auxiliarydisplaydriverlib.dll',
'auxiliarydisplayservices.dll',
'avicap32.dll',
'avifil32.dll',
'axinstsv.dll',
'azroles.dll',
'azroleui.dll',
'azsqlext.dll',
'basecsp.dll',
'basesrv.dll',
'batmeter.dll',
'batt.dll',
'bcdprov.dll',
'bcdsrv.dll',
'bdehdcfglib.dll',
'bderepair.dll',
'bdesvc.dll',
'bdeui.dll',
'bfe.dll',
'bidispl.dll',
'biocpl.dll',
'biocredprov.dll',
'bitsigd.dll',
'bitsperf.dll',
'bitsprx2.dll',
'bitsprx3.dll',
'bitsprx4.dll',
'bitsprx5.dll',
'bitsprx6.dll',
'blackbox.dll',
'blb_ps.dll',
'blbevents.dll',
'blbres.dll',
'bootres.dll',
'bootstr.dll',
'bootvid.dll',
'brdgcfg.dll',
'bridgeres.dll',
'browcli.dll',
'browser.dll',
'browseui.dll',
'bthci.dll',
'bthmtpcontexthandler.dll',
'bthpanapi.dll',
'bthserv.dll',
'btpanui.dll',
'bwunpairelevated.dll',
'c_g18030.dll',
'c_is2022.dll',
'c_iscii.dll',
'cabview.dll',
'capiprovider.dll',
'capisp.dll',
'cardgames.dll',
'catsrv.dll',
'catsrvps.dll',
'catsrvut.dll',
'cca.dll',
'cdd.dll',
'cdosys.dll',
'certcli.dll',
'certcredprovider.dll',
'certenc.dll',
'certenroll.dll',
'certenrollui.dll',
'certmgr.dll',
'certpoleng.dll',
'certprop.dll',
'cewmdm.dll',
'cfgbkend.dll',
'chkwudrv.dll',
'chsbrkr.dll',
'chtbrkr.dll',
'chxreadingstringime.dll',
'ci.dll',
'cic.dll',
'circoinst.dll',
'clb.dll',
'clbcatq.dll',
'clfsw32.dll',
'cliconfg.dll',
'clusapi.dll',
'cmcfg32.dll',
'cmdial32.dll',
'cmicryptinstall.dll',
'cmifw.dll',
'cmipnpinstall.dll',
'cmlua.dll',
'cmnclim.dll',
'cmpbk32.dll',
'cmstplua.dll',
'cmutil.dll',
'cngaudit.dll',
'cngprovider.dll',
'cnvfat.dll',
'cofiredm.dll',
'colbact.dll',
'colorcnv.dll',
'colorui.dll',
'comcat.dll',
'comctl32.dll',
'comdlg32.dll',
'compstui.dll',
'comrepl.dll',
'comres.dll',
'comsnap.dll',
'comsvcs.dll',
'comuid.dll',
'connect.dll',
'console.dll',
'corpol.dll',
'correngine.dll',
'cpfilters.dll',
'credssp.dll',
'crppresentation.dll',
'crtdll.dll',
'cryptdlg.dll',
'cryptdll.dll',
'cryptext.dll',
'cryptnet.dll',
'cryptsvc.dll',
'cscdll.dll',
'cscmig.dll',
'cscobj.dll',
'cscsvc.dll',
'cscui.dll',
'csrsrv.dll',
'ctl3d32.dll',
'd3d8thk.dll',
'd3d10_1core.dll',
'd3d10core.dll',
'd3d10level9.dll',
'd3d10warp.dll',
'd3dim.dll',
'd3dim700.dll',
'd3dramp.dll',
'dataclen.dll',
'davclnt.dll',
'davhlpr.dll',
'dbnetlib.dll',
'dbnmpntw.dll',
'dciman32.dll',
'ddaclsys.dll',
'ddoiproxy.dll',
'ddores.dll',
'ddrawex.dll',
'defaultlocationcpl.dll',
'defragproxy.dll',
'defragsvc.dll',
'deskadp.dll',
'deskmon.dll',
'deskperf.dll',
'devenum.dll',
'devicecenter.dll',
'devicedisplaystatusmanager.dll',
'devicemetadataparsers.dll',
'devicepairing.dll',
'devicepairingfolder.dll',
'devicepairinghandler.dll',
'devicepairingproxy.dll',
'deviceuxres.dll',
'devrtl.dll',
'dfdts.dll',
'dfscli.dll',
'dfshim.dll',
'dfsshlex.dll',
'dhcpcmonitor.dll',
'dhcpcore.dll',
'dhcpcore6.dll',
'dhcpqec.dll',
'dhcpsapi.dll',
'diagcpl.dll',
'diagperf.dll',
'dimsjob.dll',
'dimsroam.dll',
'diskcopy.dll',
'dispci.dll',
'dispex.dll',
'display.dll',
'dmband.dll',
'dmcompos.dll',
'dmdlgs.dll',
'dmdskmgr.dll',
'dmdskres.dll',
'dmdskres2.dll',
'dmime.dll',
'dmintf.dll',
'dmloader.dll',
'dmocx.dll',
'dmrc.dll',
'dmscript.dll',
'dmstyle.dll',
'dmsynth.dll',
'dmusic.dll',
'dmutil.dll',
'dmvdsitf.dll',
'dnsapi.dll',
'dnscmmc.dll',
'dnsext.dll',
'dnshc.dll',
'dnsrslvr.dll',
'docprop.dll',
'documentperformanceevents.dll',
'dot3api.dll',
'dot3cfg.dll',
'dot3dlg.dll',
'dot3gpclnt.dll',
'dot3gpui.dll',
'dot3hc.dll',
'dot3msm.dll',
'dot3svc.dll',
'dot3ui.dll',
'dpapiprovider.dll',
'dpmodemx.dll',
'dpnaddr.dll',
'dpnathlp.dll',
'dpnet.dll',
'dpnhpast.dll',
'dpnhupnp.dll',
'dpnlobby.dll',
'dps.dll',
'dpwsockx.dll',
'dpx.dll',
'drmmgrtn.dll',
'drmv2clt.dll',
'drprov.dll',
'drt.dll',
'drtprov.dll',
'drttransport.dll',
'ds32gt.dll',
'dsauth.dll',
'dsdmo.dll',
'dshowrdpfilter.dll',
'dskquota.dll',
'dskquoui.dll',
'dsound.dll',
'dsprop.dll',
'dsquery.dll',
'dssec.dll',
'dssenh.dll',
'dsuiext.dll',
'dswave.dll',
'dtsh.dll',
'dwmapi.dll',
'dwmcore.dll',
'dwmredir.dll',
'dwrite.dll',
'dxdiagn.dll',
'dxmasf.dll',
'dxp.dll',
'dxpps.dll',
'dxptaskringtone.dll',
'dxptasksync.dll',
'dxtmsft.dll',
'dxtrans.dll',
'eapp3hst.dll',
'eappgnui.dll',
'eapphost.dll',
'eappprxy.dll',
'eapqec.dll',
'eapsvc.dll',
'efscore.dll',
'efslsaext.dll',
'efssvc.dll',
'ehstorapi.dll',
'ehstorpwdmgr.dll',
'ehstorshell.dll',
'els.dll',
'elscore.dll',
'elslad.dll',
'elstrans.dll',
'encapi.dll',
'encdec.dll',
'encdump.dll',
'energy.dll',
'eqossnap.dll',
'es.dll',
'esent.dll',
'esentprf.dll',
'eventcls.dll',
'explorerframe.dll',
'expsrv.dll',
'f3ahvoas.dll',
'faultrep.dll',
'fdbth.dll',
'fdbthproxy.dll',
'fde.dll',
'fdeploy.dll',
'fdphost.dll',
'fdpnp.dll',
'fdprint.dll',
'fdproxy.dll',
'fdrespub.dll',
'fdssdp.dll',
'fdwcn.dll',
'fdwnet.dll',
'fdwsd.dll',
'feclient.dll',
'feedbacktool.dll',
'filemgmt.dll',
'findnetprinters.dll',
'firewallapi.dll',
'firewallcontrolpanel.dll',
'fltlib.dll',
'fmifs.dll',
'fntcache.dll',
'fontext.dll',
'fphc.dll',
'framebuf.dll',
'framedyn.dll',
'fthsvc.dll',
'fundisc.dll',
'fveapi.dll',
'fveapibase.dll',
'fvecerts.dll',
'fvecpl.dll',
'fverecover.dll',
'fveui.dll',
'fvewiz.dll',
'fwcfg.dll',
'fwpuclnt.dll',
'fwremotesvr.dll',
'fxsapi.dll',
'fxscom.dll',
'fxscomex.dll',
'fxscompose.dll',
'fxscomposeres.dll',
'fxsevent.dll',
'fxsext32.dll',
'fxsmon.dll',
'fxsresm.dll',
'fxsroute.dll',
'fxsst.dll',
'fxst30.dll',
'fxstiff.dll',
'fxsutility.dll',
'fxsxp32.dll',
'gacinstall.dll',
'gameux.dll',
'gameuxlegacygdfs.dll',
'gcdef.dll',
'gdi32.dll',
'getuname.dll',
'glmf32.dll',
'gpapi.dll',
'gpedit.dll',
'gpprefcl.dll',
'gpprnext.dll',
'gpscript.dll',
'gpsvc.dll',
'gptext.dll',
'groupinghc.dll',
'hal.dll',
'halacpi.dll',
'halmacpi.dll',
'hbaapi.dll',
'hcproviders.dll',
'helppaneproxy.dll',
'hgcpl.dll',
'hgprint.dll',
'hhsetup.dll',
'hidserv.dll',
'hnetcfg.dll',
'hnetmon.dll',
'hotplug.dll',
'hotstartuseragent.dll',
'htui.dll',
'ias.dll',
'iasacct.dll',
'iasads.dll',
'iasdatastore.dll',
'iashlpr.dll',
'iasmigplugin.dll',
'iasnap.dll',
'iaspolcy.dll',
'iasrad.dll',
'iasrecst.dll',
'iassam.dll',
'iassdo.dll',
'iassvcs.dll',
'icaapi.dll',
'icardie.dll',
'icardres.dll',
'iccoinstall.dll',
'icfupgd.dll',
'icm32.dll',
'icmui.dll',
'iconcodecservice.dll',
'icsigd.dll',
'idlisten.dll',
'idndl.dll',
'idstore.dll',
'ieakeng.dll',
'ieaksie.dll',
'ieakui.dll',
'ieapfltr.dll',
'iedkcs32.dll',
'iepeers.dll',
'iernonce.dll',
'iesetup.dll',
'iesysprep.dll',
'ifmon.dll',
'ifsutil.dll',
'ifsutilx.dll',
'igddiag.dll',
'ikeext.dll',
'imagehlp.dll',
'imageres.dll',
'imagesp1.dll',
'imapi.dll',
'imapi2.dll',
'imapi2fs.dll',
'imjp10k.dll',
'inetmib1.dll',
'inetpp.dll',
'inetppui.dll',
'inetres.dll',
'infocardapi.dll',
'inked.dll',
'input.dll',
'inseng.dll',
'iologmsg.dll',
'ipbusenum.dll',
'ipbusenumproxy.dll',
'iphlpapi.dll',
'iphlpsvc.dll',
'ipnathlp.dll',
'iprop.dll',
'iprtprio.dll',
'iprtrmgr.dll',
'ipsecsnp.dll',
'ipsecsvc.dll',
'ipsmsnap.dll',
'irclass.dll',
'irmon.dll',
'iscsicpl.dll',
'iscsidsc.dll',
'iscsied.dll',
'iscsiexe.dll',
'iscsilog.dll',
'iscsium.dll',
'iscsiwmi.dll',
'itircl.dll',
'itss.dll',
'itvdata.dll',
'iyuv_32.dll',
'jnwmon.dll',
'jscript.dll',
'jsproxy.dll',
'kbd101.dll',
'kbd101a.dll',
'kbd101b.dll',
'kbd101c.dll',
'kbd103.dll',
'kbd106.dll',
'kbd106n.dll',
'kbda1.dll',
'kbda2.dll',
'kbda3.dll',
'kbdal.dll',
'kbdarme.dll',
'kbdarmw.dll',
'kbdax2.dll',
'kbdaze.dll',
'kbdazel.dll',
'kbdbash.dll',
'kbdbe.dll',
'kbdbene.dll',
'kbdbgph.dll',
'kbdbgph1.dll',
'kbdbhc.dll',
'kbdblr.dll',
'kbdbr.dll',
'kbdbu.dll',
'kbdbulg.dll',
'kbdca.dll',
'kbdcan.dll',
'kbdcr.dll',
'kbdcz.dll',
'kbdcz1.dll',
'kbdcz2.dll',
'kbdda.dll',
'kbddiv1.dll',
'kbddiv2.dll',
'kbddv.dll',
'kbdes.dll',
'kbdest.dll',
'kbdfa.dll',
'kbdfc.dll',
'kbdfi.dll',
'kbdfi1.dll',
'kbdfo.dll',
'kbdfr.dll',
'kbdgae.dll',
'kbdgeo.dll',
'kbdgeoer.dll',
'kbdgeoqw.dll',
'kbdgkl.dll',
'kbdgr.dll',
'kbdgr1.dll',
'kbdgrlnd.dll',
'kbdhau.dll',
'kbdhe.dll',
'kbdhe220.dll',
'kbdhe319.dll',
'kbdheb.dll',
'kbdhela2.dll',
'kbdhela3.dll',
'kbdhept.dll',
'kbdhu.dll',
'kbdhu1.dll',
'kbdibm02.dll',
'kbdibo.dll',
'kbdic.dll',
'kbdinasa.dll',
'kbdinbe1.dll',
'kbdinbe2.dll',
'kbdinben.dll',
'kbdindev.dll',
'kbdinguj.dll',
'kbdinhin.dll',
'kbdinkan.dll',
'kbdinmal.dll',
'kbdinmar.dll',
'kbdinori.dll',
'kbdinpun.dll',
'kbdintam.dll',
'kbdintel.dll',
'kbdinuk2.dll',
'kbdir.dll',
'kbdit.dll',
'kbdit142.dll',
'kbdiulat.dll',
'kbdjpn.dll',
'kbdkaz.dll',
'kbdkhmr.dll',
'kbdkor.dll',
'kbdkyr.dll',
'kbdla.dll',
'kbdlao.dll',
'kbdlk41a.dll',
'kbdlt.dll',
'kbdlt1.dll',
'kbdlt2.dll',
'kbdlv.dll',
'kbdlv1.dll',
'kbdmac.dll',
'kbdmacst.dll',
'kbdmaori.dll',
'kbdmlt47.dll',
'kbdmlt48.dll',
'kbdmon.dll',
'kbdmonmo.dll',
'kbdne.dll',
'kbdnec.dll',
'kbdnec95.dll',
'kbdnecat.dll',
'kbdnecnt.dll',
'kbdnepr.dll',
'kbdno.dll',
'kbdno1.dll',
'kbdnso.dll',
'kbdpash.dll',
'kbdpl.dll',
'kbdpl1.dll',
'kbdpo.dll',
'kbdro.dll',
'kbdropr.dll',
'kbdrost.dll',
'kbdru.dll',
'kbdru1.dll',
'kbdsf.dll',
'kbdsg.dll',
'kbdsl.dll',
'kbdsl1.dll',
'kbdsmsfi.dll',
'kbdsmsno.dll',
'kbdsn1.dll',
'kbdsorex.dll',
'kbdsors1.dll',
'kbdsorst.dll',
'kbdsp.dll',
'kbdsw.dll',
'kbdsw09.dll',
'kbdsyr1.dll',
'kbdsyr2.dll',
'kbdtajik.dll',
'kbdtat.dll',
'kbdth0.dll',
'kbdth1.dll',
'kbdth2.dll',
'kbdth3.dll',
'kbdtiprc.dll',
'kbdtuf.dll',
'kbdtuq.dll',
'kbdturme.dll',
'kbdughr.dll',
'kbdughr1.dll',
'kbduk.dll',
'kbdukx.dll',
'kbdur.dll',
'kbdur1.dll',
'kbdurdu.dll',
'kbdus.dll',
'kbdusa.dll',
'kbdusl.dll',
'kbdusr.dll',
'kbdusx.dll',
'kbduzb.dll',
'kbdvntc.dll',
'kbdwol.dll',
'kbdyak.dll',
'kbdyba.dll',
'kbdycc.dll',
'kbdycl.dll',
'kd1394.dll',
'kdcom.dll',
'kdusb.dll',
'kerberos.dll',
'kernel32.dll',
'kernelbase.dll',
'kernelceip.dll',
'keyiso.dll',
'keymgr.dll',
'kmsvc.dll',
'korwbrkr.dll',
'ksuser.dll',
'l2gpstore.dll',
'l2nacp.dll',
'l2sechc.dll',
'langcleanupsysprepaction.dll',
'laprxy.dll',
'licmgr10.dll',
'listsvc.dll',
'lltdapi.dll',
'lltdres.dll',
'lltdsvc.dll',
'lmhsvc.dll',
'loadperf.dll',
'localsec.dll',
'localspl.dll',
'localui.dll',
'locationapi.dll',
'loghours.dll',
'lpk.dll',
'lpksetupproxyserv.dll',
'lsasrv.dll',
'lsmproxy.dll',
'luainstall.dll',
'lz32.dll',
'magnification.dll',
'mapistub.dll',
'mcewmdrmndbootstrap.dll',
'mciavi32.dll',
'mcicda.dll',
'mciqtz32.dll',
'mciseq.dll',
'mciwave.dll',
'mcmde.dll',
'mcsrchph.dll',
'mctres.dll',
'mcupdate_authenticamd.dll',
'mcupdate_genuineintel.dll',
'mcx2svc.dll',
'mcxdriv.dll',
'mdminst.dll',
'mediametadatahandler.dll',
'memdiag.dll',
'mf3216.dll',
'mfaacenc.dll',
'mfc40.dll',
'mfc40u.dll',
'mfc42.dll',
'mfc42u.dll',
'mfcsubs.dll',
'mfds.dll',
'mfdvdec.dll',
'mferror.dll',
'mfh264enc.dll',
'mfmjpegdec.dll',
'mfplay.dll',
'mfps.dll',
'mfvdsp.dll',
'mfwmaaec.dll',
'microsoft-windows-hal-events.dll',
'microsoft-windows-kernel-power-events.dll',
'microsoft-windows-kernel-processor-power-events.dll',
'midimap.dll',
'migisol.dll',
'miguiresource.dll',
'mimefilt.dll',
'mmcbase.dll',
'mmci.dll',
'mmcico.dll',
'mmcndmgr.dll',
'mmcshext.dll',
'mmcss.dll',
'mmdevapi.dll',
'mmres.dll',
'modemui.dll',
'montr_ci.dll',
'moricons.dll',
'mp3dmod.dll',
'mp4sdecd.dll',
'mp43decd.dll',
'mpg4decd.dll',
'mpr.dll',
'mprapi.dll',
'mprddm.dll',
'mprdim.dll',
'mprmsg.dll',
'mpssvc.dll',
'msaatext.dll',
'msac3enc.dll',
'msacm32.dll',
'msafd.dll',
'msaudite.dll',
'mscandui.dll',
'mscat32.dll',
'msclmd.dll',
'mscms.dll',
'mscorier.dll',
'mscories.dll',
'mscpx32r.dll',
'mscpxl32.dll',
'msctf.dll',
'msctfmonitor.dll',
'msctfp.dll',
'msctfui.dll',
'msdadiag.dll',
'msdelta.dll',
'msdri.dll',
'msdtckrm.dll',
'msdtclog.dll',
'msdtcprx.dll',
'msdtctm.dll',
'msdtcuiu.dll',
'msdtcvsp1res.dll',
'msexch40.dll',
'msexcl40.dll',
'msfeedsbs.dll',
'msftedit.dll',
'mshtmled.dll',
'mshtmler.dll',
'msicofire.dll',
'msidcrl30.dll',
'msident.dll',
'msidle.dll',
'msidntld.dll',
'msieftp.dll',
'msihnd.dll',
'msimg32.dll',
'msimsg.dll',
'msimtf.dll',
'msisip.dll',
'msjet40.dll',
'msjetoledb40.dll',
'msjint40.dll',
'msjter40.dll',
'msjtes40.dll',
'msltus40.dll',
'msmmsp.dll',
'msmpeg2adec.dll',
'msmpeg2enc.dll',
'msmpeg2vdec.dll',
'msnetobj.dll',
'msobjs.dll',
'msoeacct.dll',
'msoert2.dll',
'msorc32r.dll',
'msorcl32.dll',
'mspatcha.dll',
'mspbda.dll',
'mspbdacoinst.dll',
'mspbde40.dll',
'msports.dll',
'msprivs.dll',
'msrahc.dll',
'msrating.dll',
'msrd2x40.dll',
'msrd3x40.dll',
'msrdc.dll',
'msrdpwebaccess.dll',
'msrepl40.dll',
'msrle32.dll',
'msscntrs.dll',
'msscp.dll',
'mssha.dll',
'msshavmsg.dll',
'msshooks.dll',
'mssign32.dll',
'mssip32.dll',
'mssitlb.dll',
'mssph.dll',
'mssphtb.dll',
'mssprxy.dll',
'mssrch.dll',
'mssvp.dll',
'msswch.dll',
'mstask.dll',
'mstext40.dll',
'mstime.dll',
'mstscax.dll',
'msutb.dll',
'msv1_0.dll',
'msvbvm60.dll',
'msvcirt.dll',
'msvcp60.dll',
'msvcr70.dll',
'msvcrt.dll',
'msvcrt40.dll',
'msvfw32.dll',
'msvidc32.dll',
'msvidctl.dll',
'mswdat10.dll',
'mswmdm.dll',
'mswsock.dll',
'mswstr10.dll',
'msxbde40.dll',
'msxml3.dll',
'msxml3r.dll',
'msxml6.dll',
'msxml6r.dll',
'msyuv.dll',
'mtxclu.dll',
'mtxdm.dll',
'mtxex.dll',
'mtxlegih.dll',
'mtxoci.dll',
'muifontsetup.dll',
'muilanguagecleanup.dll',
'mycomput.dll',
'mydocs.dll',
'napcrypt.dll',
'napdsnap.dll',
'naphlpr.dll',
'napinsp.dll',
'napipsec.dll',
'napmontr.dll',
'nativehooks.dll',
'naturallanguage6.dll',
'ncdprop.dll',
'nci.dll',
'ncryptui.dll',
'ncsi.dll',
'nddeapi.dll',
'ndfetw.dll',
'ndfhcdiscovery.dll',
'ndiscapcfg.dll',
'ndishc.dll',
'ndproxystub.dll',
'negoexts.dll',
'netapi32.dll',
'netbios.dll',
'netcenter.dll',
'netcfgx.dll',
'netcorehc.dll',
'netdiagfx.dll',
'netevent.dll',
'netfxperf.dll',
'neth.dll',
'netid.dll',
'netiohlp.dll',
'netjoin.dll',
'netlogon.dll',
'netman.dll',
'netmsg.dll',
'netplwiz.dll',
'netprof.dll',
'netprofm.dll',
'netprojw.dll',
'netshell.dll',
'nettrace.dll',
'networkexplorer.dll',
'networkitemfactory.dll',
'networkmap.dll',
'nlaapi.dll',
'nlahc.dll',
'nlasvc.dll',
'nlhtml.dll',
'nlmgp.dll',
'nlmsprep.dll',
'nlsbres.dll',
'nlsdata0000.dll',
'nlsdata000a.dll',
'nlsdata000c.dll',
'nlsdata0c1a.dll',
'nlsdata000d.dll',
'nlsdata000f.dll',
'nlsdata0001.dll',
'nlsdata001a.dll',
'nlsdata001b.dll',
'nlsdata001d.dll',
'nlsdata0002.dll',
'nlsdata002a.dll',
'nlsdata0003.dll',
'nlsdata003e.dll',
'nlsdata004a.dll',
'nlsdata004b.dll',
'nlsdata004c.dll',
'nlsdata004e.dll',
'nlsdata0007.dll',
'nlsdata0009.dll',
'nlsdata0010.dll',
'nlsdata0011.dll',
'nlsdata0013.dll',
'nlsdata0018.dll',
'nlsdata0019.dll',
'nlsdata0020.dll',
'nlsdata0021.dll',
'nlsdata0022.dll',
'nlsdata0024.dll',
'nlsdata0026.dll',
'nlsdata0027.dll',
'nlsdata0039.dll',
'nlsdata0045.dll',
'nlsdata0046.dll',
'nlsdata0047.dll',
'nlsdata0049.dll',
'nlsdata081a.dll',
'nlsdata0414.dll',
'nlsdata0416.dll',
'nlsdata0816.dll',
'nlsdl.dll',
'nlslexicons000a.dll',
'nlslexicons000c.dll',
'nlslexicons0c1a.dll',
'nlslexicons000d.dll',
'nlslexicons000f.dll',
'nlslexicons0001.dll',
'nlslexicons001a.dll',
'nlslexicons001b.dll',
'nlslexicons001d.dll',
'nlslexicons0002.dll',
'nlslexicons002a.dll',
'nlslexicons0003.dll',
'nlslexicons003e.dll',
'nlslexicons004a.dll',
'nlslexicons004b.dll',
'nlslexicons004c.dll',
'nlslexicons004e.dll',
'nlslexicons0007.dll',
'nlslexicons0009.dll',
'nlslexicons0010.dll',
'nlslexicons0011.dll',
'nlslexicons0013.dll',
'nlslexicons0018.dll',
'nlslexicons0019.dll',
'nlslexicons0020.dll',
'nlslexicons0021.dll',
'nlslexicons0022.dll',
'nlslexicons0024.dll',
'nlslexicons0026.dll',
'nlslexicons0027.dll',
'nlslexicons0039.dll',
'nlslexicons0045.dll',
'nlslexicons0046.dll',
'nlslexicons0047.dll',
'nlslexicons0049.dll',
'nlslexicons081a.dll',
'nlslexicons0414.dll',
'nlslexicons0416.dll',
'nlslexicons0816.dll',
'nlsmodels0011.dll',
'npmproxy.dll',
'nrpsrv.dll',
'nshhttp.dll',
'nshipsec.dll',
'nshwfp.dll',
'nsisvc.dll',
'ntdll.dll',
'ntlanman.dll',
'ntlanui2.dll',
'ntmarta.dll',
'ntprint.dll',
'ntvdmd.dll',
'objsel.dll',
'ocsetapi.dll',
'odbc32gt.dll',
'odbcbcp.dll',
'odbcconf.dll',
'odbccr32.dll',
'odbccu32.dll',
'odbcint.dll',
'odbcji32.dll',
'odbcjt32.dll',
'odbctrac.dll',
'oddbse32.dll',
'odexl32.dll',
'odfox32.dll',
'odpdx32.dll',
'odtext32.dll',
'offfilt.dll',
'ogldrv.dll',
'ole32.dll',
'oleacc.dll',
'oleacchooks.dll',
'oleaccrc.dll',
'oleaut32.dll',
'olecli32.dll',
'oledlg.dll',
'oleprn.dll',
'oleres.dll',
'olesvr32.dll',
'olethk32.dll',
'onex.dll',
'onexui.dll',
'onlineidcpl.dll',
'oobefldr.dll',
'opcservices.dll',
'osbaseln.dll',
'osuninst.dll',
'p2p.dll',
'p2pcollab.dll',
'p2pgraph.dll',
'p2pnetsh.dll',
'p2psvc.dll',
'packager.dll',
'panmap.dll',
'pautoenr.dll',
'pcadm.dll',
'pcaevts.dll',
'pcasvc.dll',
'pcaui.dll',
'pcwutl.dll',
'pdhui.dll',
'peerdist.dll',
'peerdisthttptrans.dll',
'peerdistsh.dll',
'peerdistsvc.dll',
'peerdistwsddiscoprov.dll',
'perfcentercpl.dll',
'perfctrs.dll',
'perfdisk.dll',
'perfnet.dll',
'perfos.dll',
'perfproc.dll',
'perftrack.dll',
'perfts.dll',
'photometadatahandler.dll',
'photowiz.dll',
'pid.dll',
'pidgenx.dll',
'pifmgr.dll',
'pku2u.dll',
'pla.dll',
'playsndsrv.dll',
'pmcsnap.dll',
'pngfilt.dll',
'pnidui.dll',
'pnpsetup.dll',
'pnpts.dll',
'pnpui.dll',
'pnpxassoc.dll',
'pnpxassocprx.dll',
'pnrpauto.dll',
'pnrphc.dll',
'pnrpnsp.dll',
'pnrpsvc.dll',
'polstore.dll',
'portabledeviceapi.dll',
'portabledeviceclassextension.dll',
'portabledeviceconnectapi.dll',
'portabledevicestatus.dll',
'portabledevicesyncprovider.dll',
'portabledevicetypes.dll',
'portabledevicewiacompat.dll',
'portabledevicewmdrm.dll',
'pots.dll',
'powercpl.dll',
'ppcsnap.dll',
'presentationcffrasterizernative_v0300.dll',
'presentationhostproxy.dll',
'presentationnative_v0300.dll',
'prflbmsg.dll',
'printfilterpipelineprxy.dll',
'printisolationproxy.dll',
'printui.dll',
'prncache.dll',
'prnfldr.dll',
'prnntfy.dll',
'procinst.dll',
'profprov.dll',
'profsvc.dll',
'provsvc.dll',
'provthrd.dll',
'psapi.dll',
'psbase.dll',
'pshed.dll',
'psisdecd.dll',
'pstorec.dll',
'pstorsvc.dll',
'puiapi.dll',
'puiobj.dll',
'pwrshplugin.dll',
'qagent.dll',
'qagentrt.dll',
'qasf.dll',
'qcap.dll',
'qcliprov.dll',
'qdv.dll',
'qdvd.dll',
'qedit.dll',
'qedwipes.dll',
'qmgr.dll',
'qmgrprxy.dll',
'qshvhost.dll',
'qsvrmgmt.dll',
'quartz.dll',
'query.dll',
'qutil.dll',
'racengn.dll',
'racpldlg.dll',
'radardt.dll',
'radarrs.dll',
'rasadhlp.dll',
'rasapi32.dll',
'rasauto.dll',
'rascfg.dll',
'raschap.dll',
'rasctrs.dll',
'rasdiag.dll',
'rasgcw.dll',
'rasman.dll',
'rasmans.dll',
'rasmbmgr.dll',
'rasmm.dll',
'rasmontr.dll',
'rasmxs.dll',
'rasplap.dll',
'rasppp.dll',
'rasser.dll',
'rastapi.dll',
'rastls.dll',
'rdpcfgex.dll',
'rdpcore.dll',
'rdpcorekmts.dll',
'rdpd3d.dll',
'rdpdd.dll',
'rdpencdd.dll',
'rdpencom.dll',
'rdpendp.dll',
'rdprefdd.dll',
'rdprefdrvapi.dll',
'rdpwsx.dll',
'reagent.dll',
'recovery.dll',
'regapi.dll',
'regctrl.dll',
'regidle.dll',
'regsvc.dll',
'remotepg.dll',
'resampledmo.dll',
'resutils.dll',
'rgb9rast.dll',
'riched20.dll',
'riched32.dll',
'rnr20.dll',
'rpcdiag.dll',
'rpcepmap.dll',
'rpchttp.dll',
'rpcndfp.dll',
'rpcns4.dll',
'rpcnsh.dll',
'rpcrtremote.dll',
'rpcss.dll',
'rsaenh.dll',
'rshx32.dll',
'rstrtmgr.dll',
'rtffilt.dll',
'rtm.dll',
'samlib.dll',
'sampleres.dll',
'samsrv.dll',
'sas.dll',
'sbe.dll',
'sbeio.dll',
'sberes.dll',
'scansetting.dll',
'scarddlg.dll',
'scardsvr.dll',
'sccls.dll',
'scecli.dll',
'scext.dll',
'schannel.dll',
'schedcli.dll',
'schedsvc.dll',
'scksp.dll',
'scripto.dll',
'scrobj.dll',
'scrptadm.dll',
'scrrun.dll',
'sdautoplay.dll',
'sdcpl.dll',
'sdengin2.dll',
'sdhcinst.dll',
'sdiageng.dll',
'sdiagprv.dll',
'sdiagschd.dll',
'sdohlp.dll',
'sdrsvc.dll',
'sdshext.dll',
'searchfolder.dll',
'sechost.dll',
'seclogon.dll',
'secproc.dll',
'secproc_isv.dll',
'secproc_ssp.dll',
'secproc_ssp_isv.dll',
'sendmail.dll',
'sens.dll',
'sensapi.dll',
'sensorsapi.dll',
'sensorsclassextension.dll',
'sensorscpl.dll',
'sensrsvc.dll',
'serialui.dll',
'serwvdrv.dll',
'sessenv.dll',
'setbcdlocale.dll',
'setupcln.dll',
'setupetw.dll',
'sfc.dll',
'sfc_os.dll',
'shacct.dll',
'sharemediacpl.dll',
'shdocvw.dll',
'shell32.dll',
'shellstyle.dll',
'shfolder.dll',
'shgina.dll',
'shimeng.dll',
'shimgvw.dll',
'shlwapi.dll',
'shpafact.dll',
'shsetup.dll',
'shsvcs.dll',
'shunimpl.dll',
'shwebsvc.dll',
'signdrv.dll',
'sisbkup.dll',
'slc.dll',
'slcext.dll',
'slwga.dll',
'smartcardcredentialprovider.dll',
'smbhelperclass.dll',
'smiengine.dll',
'sndvolsso.dll',
'snmpapi.dll',
'sntsearch.dll',
'softkbd.dll',
'softpub.dll',
'sortserver2003compat.dll',
'sortwindows6compat.dll',
'spbcd.dll',
'spcmsg.dll',
'sperror.dll',
'spfileq.dll',
'spinf.dll',
'spnet.dll',
'spopk.dll',
'sppc.dll',
'sppcc.dll',
'sppcext.dll',
'sppcomapi.dll',
'sppcommdlg.dll',
'sppinst.dll',
'sppnp.dll',
'sppobjs.dll',
'sppuinotify.dll',
'sppwinob.dll',
'sppwmi.dll',
'spwinsat.dll',
'spwizeng.dll',
'spwizimg.dll',
'spwizres.dll',
'spwizui.dll',
'spwmp.dll',
'sqlceoledb30.dll',
'sqlceqp30.dll',
'sqlcese30.dll',
'sqlsrv32.dll',
'sqlunirl.dll',
'sqlwid.dll',
'sqlwoa.dll',
'srchadmin.dll',
'srclient.dll',
'srcore.dll',
'srhelper.dll',
'srpuxnativesnapin.dll',
'srrstr.dll',
'srvsvc.dll',
'srwmi.dll',
'sscore.dll',
'ssdpapi.dll',
'ssdpsrv.dll',
'sspisrv.dll',
'ssshim.dll',
'sstpsvc.dll',
'stclient.dll',
'sti_ci.dll',
'stobject.dll',
'storagecontexthandler.dll',
'storprop.dll',
'streamci.dll',
'structuredquery.dll',
'sud.dll',
'swprv.dll',
'sxproxy.dll',
'sxs.dll',
'sxshared.dll',
'sxssrv.dll',
'sxsstore.dll',
'synccenter.dll',
'synceng.dll',
'synchostps.dll',
'syncinfrastructure.dll',
'syncinfrastructureps.dll',
'syncreg.dll',
'syncui.dll',
'sysclass.dll',
'sysfxui.dll',
'sysmain.dll',
'sysntfy.dll',
'sysprepmce.dll',
'syssetup.dll',
'systemcpl.dll',
'tabbtn.dll',
'tabbtnex.dll',
'tabsvc.dll',
'tapi3.dll',
'tapi32.dll',
'tapilua.dll',
'tapimigplugin.dll',
'tapiperf.dll',
'tapisrv.dll',
'tapisysprep.dll',
'tapiui.dll',
'taskbarcpl.dll',
'taskcomp.dll',
'taskschd.dll',
'taskschdps.dll',
'tbssvc.dll',
'tcpipcfg.dll',
'tcpmib.dll',
'tcpmon.dll',
'tcpmonui.dll',
'termmgr.dll',
'termsrv.dll',
'thawbrkr.dll',
'themecpl.dll',
'themeservice.dll',
'themeui.dll',
'thumbcache.dll',
'timedatemuicallback.dll',
'tlscsp.dll',
'tpmcompc.dll',
'tquery.dll',
'trapi.dll',
'trkwks.dll',
'tsbyuv.dll',
'tscfgwmi.dll',
'tschannel.dll',
'tsddd.dll',
'tserrredir.dll',
'tsgqec.dll',
'tsmf.dll',
'tspkg.dll',
'tspnprdrcoinstaller.dll',
'tspubwmi.dll',
'tsworkspace.dll',
'tvratings.dll',
'twext.dll',
'txflog.dll',
'txfw32.dll',
'tzres.dll',
'ubpm.dll',
'ucmhc.dll',
'udhisapi.dll',
'udwm.dll',
'uexfat.dll',
'ufat.dll',
'uianimation.dll',
'uiautomationcore.dll',
'uicom.dll',
'uihub.dll',
'uiribbon.dll',
'uiribbonres.dll',
'ulib.dll',
'umb.dll',
'umdmxfrm.dll',
'umpo.dll',
'umrdp.dll',
'unattend.dll',
'unimdmat.dll',
'uniplat.dll',
'untfs.dll',
'upnp.dll',
'upnphost.dll',
'ureg.dll',
'urlmon.dll',
'usbceip.dll',
'usbmon.dll',
'usbperf.dll',
'usbui.dll',
'user32.dll',
'useraccountcontrolsettings.dll',
'usercpl.dll',
'userenv.dll',
'utildll.dll',
'uudf.dll',
'uxinit.dll',
'uxlib.dll',
'uxlibres.dll',
'uxsms.dll',
'uxtheme.dll',
'van.dll',
'vault.dll',
'vaultcredprovider.dll',
'vaultsvc.dll',
'vbajet32.dll',
'vboxoglarrayspu.dll',
'vboxoglcrutil.dll',
'vboxoglerrorspu.dll',
'vboxoglfeedbackspu.dll',
'vboxoglpackspu.dll',
'vboxoglpassthroughspu.dll',
'vbscript.dll',
'vdmdbg.dll',
'vds_ps.dll',
'vdsbas.dll',
'vdsdyn.dll',
'vdsutil.dll',
'vdsvd.dll',
'verifier.dll',
'version.dll',
'vfpodbc.dll',
'vfwwdm32.dll',
'vga.dll',
'vga64k.dll',
'vga256.dll',
'vidreszr.dll',
'vmbuscoinstaller.dll',
'vmbuspipe.dll',
'vmbusres.dll',
'vmdcoinstall.dll',
'vmicres.dll',
'vmictimeprovider.dll',
'vmstorfltres.dll',
'vpnike.dll',
'vpnikeapi.dll',
'vss_ps.dll',
'vssapi.dll',
'w32time.dll',
'w32topl.dll',
'wabsyncprovider.dll',
'wavdest.dll',
'wavemsp.dll',
'wbemcomn.dll',
'wbiosrvc.dll',
'wcnapi.dll',
'wcncsvc.dll',
'wcneapauthproxy.dll',
'wcneappeerproxy.dll',
'wcnnetsh.dll',
'wcnwiz.dll',
'wcspluginservice.dll',
'wdc.dll',
'wdi.dll',
'wdiasqmmodule.dll',
'wdigest.dll',
'webcheck.dll',
'webclnt.dll',
'webio.dll',
'wecapi.dll',
'wecsvc.dll',
'werconcpl.dll',
'wercplsupport.dll',
'werdiagcontroller.dll',
'wersvc.dll',
'werui.dll',
'wevtfwd.dll',
'wevtsvc.dll',
'wfapigp.dll',
'wfhc.dll',
'wfsr.dll',
'whealogr.dll',
'whhelper.dll',
'wiaaut.dll',
'wiadefui.dll',
'wiadss.dll',
'wiarpc.dll',
'wiascanprofiles.dll',
'wiaservc.dll',
'wiashext.dll',
'wiatrace.dll',
'wiavideo.dll',
'win32spl.dll',
'winbio.dll',
'wincredprovider.dll',
'windowscodecs.dll',
'windowscodecsext.dll',
'winethc.dll',
'winfax.dll',
'winhttp.dll',
'wininet - copy.dll',
'wininet.dll',
'winipsec.dll',
'winmm.dll',
'winrnr.dll',
'winrscmd.dll',
'winrsmgr.dll',
'winrssrv.dll',
'winsatapi.dll',
'winscard.dll',
'winshfhc.dll',
'winsockhc.dll',
'winsrpc.dll',
'winsrv.dll',
'winsta.dll',
'winsync.dll',
'winsyncmetastore.dll',
'winsyncproviders.dll',
'wksprtps.dll',
'wkssvc.dll',
'wlancfg.dll',
'wlanconn.dll',
'wlandlg.dll',
'wlangpui.dll',
'wlanhc.dll',
'wlanhlp.dll',
'wlaninst.dll',
'wlanmm.dll',
'wlanmsm.dll',
'wlanpref.dll',
'wlansec.dll',
'wlansvc.dll',
'wlanui.dll',
'wldap32.dll',
'wlgpclnt.dll',
'wls0wndh.dll',
'wmadmod.dll',
'wmadmoe.dll',
'wmalfxgfxdsp.dll',
'wmasf.dll',
'wmcodecdspps.dll',
'wmdmlog.dll',
'wmdmps.dll',
'wmdrmdev.dll',
'wmdrmnet.dll',
'wmdrmsdk.dll',
'wmerror.dll',
'wmi.dll',
'wmicmiplugin.dll',
'wmidx.dll',
'wmiprop.dll',
'wmnetmgr.dll',
'wmp.dll',
'wmpcm.dll',
'wmpdui.dll',
'wmpdxm.dll',
'wmpeffects.dll',
'wmpencen.dll',
'wmphoto.dll',
'wmploc.dll',
'wmpmde.dll',
'wmpps.dll',
'wmpshell.dll',
'wmpsrcwp.dll',
'wmspdmod.dll',
'wmspdmoe.dll',
'wmvcore.dll',
'wmvdecod.dll',
'wmvdspa.dll',
'wmvencod.dll',
'wmvsdecd.dll',
'wmvsencd.dll',
'wmvxencd.dll',
'wpc.dll',
'wpcao.dll',
'wpccpl.dll',
'wpcmig.dll',
'wpcsvc.dll',
'wpcumi.dll',
'wpd_ci.dll',
'wpdbusenum.dll',
'wpdshext.dll',
'wpdshserviceobj.dll',
'wpdsp.dll',
'wpdwcn.dll',
'ws2_32.dll',
'ws2help.dll',
'wscapi.dll',
'wscinterop.dll',
'wscisvif.dll',
'wscmisetup.dll',
'wscproxystub.dll',
'wscsvc.dll',
'wsdapi.dll',
'wsdchngr.dll',
'wsdmon.dll',
'wsdprintproxy.dll',
'wsdscanproxy.dll',
'wsecedit.dll',
'wsepno.dll',
'wshbth.dll',
'wshcon.dll',
'wshelper.dll',
'wshext.dll',
'wship6.dll',
'wshirda.dll',
'wshnetbs.dll',
'wshqos.dll',
'wshrm.dll',
'wshtcpip.dll',
'wsmanmigrationplugin.dll',
'wsmauto.dll',
'wsmplpxy.dll',
'wsmres.dll',
'wsmsvc.dll',
'wsmwmipl.dll',
'wsock32.dll',
'wtsapi32.dll',
'wuapi.dll',
'wuaueng.dll',
'wucltux.dll',
'wudfcoinstaller.dll',
'wudfplatform.dll',
'wudfsvc.dll',
'wudfx.dll',
'wudriver.dll',
'wups.dll',
'wups2.dll',
'wuwebv.dll',
'wvc.dll',
'wwanadvui.dll',
'wwanapi.dll',
'wwancfg.dll',
'wwanconn.dll',
'wwanhc.dll',
'wwaninst.dll',
'wwanmm.dll',
'wwanpref.dll',
'wwanprotdim.dll',
'wwansvc.dll',
'wwapi.dll',
'wzcdlg.dll',
'xinput9_1_0.dll',
'xmlfilter.dll',
'xmlprovi.dll',
'xolehlp.dll',
'xpsfilt.dll',
'xpsgdiconverter.dll',
'xpsprint.dll',
'xpsrasterservice.dll',
'xpsservices.dll',
'xpsshhdr.dll',
'xpssvcs.dll',
'xwizards.dll',
'xwreg.dll',
'xwtpdui.dll',
'xwtpw32.dll',
'zgmprxy.dll',
'zipfldr.dll',
])
WIN8_DLLS = frozenset([
'adhapi.dll',
'adhsvc.dll',
'adrclient.dll',
'api-ms-win-appmodel-identity-l1-1-0.dll',
'api-ms-win-appmodel-state-l1-1-0.dll',
'api-ms-win-base-bootconfig-l1-1-0.dll',
'api-ms-win-base-util-l1-1-0.dll',
'api-ms-win-core-appcompat-l1-1-0.dll',
'api-ms-win-core-appinit-l1-1-0.dll',
'api-ms-win-core-bem-l1-1-0.dll',
'api-ms-win-core-bicltapi-l1-1-0.dll',
'api-ms-win-core-biplmapi-l1-1-0.dll',
'api-ms-win-core-biptcltapi-l1-1-0.dll',
'api-ms-win-core-comm-l1-1-0.dll',
'api-ms-win-core-fibers-l2-1-0.dll',
'api-ms-win-core-firmware-l1-1-0.dll',
'api-ms-win-core-localization-obsolete-l1-1-0.dll',
'api-ms-win-core-multipleproviderrouter-l1-1-0.dll',
'api-ms-win-core-processsecurity-l1-1-0.dll',
'api-ms-win-core-processtopology-l1-1-0.dll',
'api-ms-win-core-psapi-obsolete-l1-1-0.dll',
'api-ms-win-core-psm-info-l1-1-0.dll',
'api-ms-win-core-psm-plm-l1-1-0.dll',
'api-ms-win-core-registry-private-l1-1-0.dll',
'api-ms-win-core-stringloader-l1-1-0.dll',
'api-ms-win-core-timezone-private-l1-1-0.dll',
'api-ms-win-core-version-private-l1-1-0.dll',
'api-ms-win-core-versionansi-l1-1-0.dll',
'api-ms-win-core-winrt-errorprivate-l1-1-0.dll',
'api-ms-win-core-winrt-propertysetprivate-l1-1-0.dll',
'api-ms-win-core-winrt-registration-l1-1-0.dll',
'api-ms-win-core-winrt-roparameterizediid-l1-1-0.dll',
'api-ms-win-core-xstate-l1-1-1.dll',
'api-ms-win-devices-config-l1-1-0.dll',
'api-ms-win-devices-swdevice-l1-1-0.dll',
'api-ms-win-eventing-obsolete-l1-1-0.dll',
'api-ms-win-eventlog-private-l1-1-0.dll',
'api-ms-win-gdi-ie-rgn-l1-1-0.dll',
'api-ms-win-http-time-l1-1-0.dll',
'api-ms-win-input-ie-interactioncontext-l1-1-0.dll',
'api-ms-win-mm-joystick-l1-1-0.dll',
'api-ms-win-mm-mci-l1-1-0.dll',
'api-ms-win-mm-misc-l1-1-0.dll',
'api-ms-win-mm-misc-l2-1-0.dll',
'api-ms-win-mm-mme-l1-1-0.dll',
'api-ms-win-mm-playsound-l1-1-0.dll',
'api-ms-win-net-isolation-l1-1-0.dll',
'api-ms-win-ntuser-dc-access-l1-1-0.dll',
'api-ms-win-ntuser-ie-clipboard-l1-1-0.dll',
'api-ms-win-ntuser-ie-message-l1-1-0.dll',
'api-ms-win-ntuser-ie-window-l1-1-0.dll',
'api-ms-win-ntuser-ie-wmpointer-l1-1-0.dll',
'api-ms-win-ntuser-uicontext-l1-1-0.dll',
'api-ms-win-ro-typeresolution-l1-1-0.dll',
'api-ms-win-security-appcontainer-l1-1-0.dll',
'api-ms-win-security-audit-l1-1-0.dll',
'api-ms-win-security-base-private-l1-1-0.dll',
'api-ms-win-security-sddl-ansi-l1-1-0.dll',
'api-ms-win-security-sddlparsecond-l1-1-0.dll',
'api-ms-win-shlwapi-ie-l1-1-0.dll',
'apprepapi.dll',
'apprepsync.dll',
'appsruprov.dll',
'appxalluserstore.dll',
'appxdeploymentclient.dll',
'appxdeploymentextensions.dll',
'appxdeploymentserver.dll',
'appxpackaging.dll',
'appxsip.dll',
'appxstreamingdatasourceps.dll',
'appxsysprep.dll',
'aspnet_counters.dll',
'audioendpointbuilder.dll',
'auinstallagent.dll',
'authbroker.dll',
'authext.dll',
'authhostproxy.dll',
'bcp47langs.dll',
'bi.dll',
'bisrv.dll',
'bitsprx7.dll',
'biwinrt.dll',
'bluetoothapis.dll',
'bootmenuux.dll',
'bootux.dll',
'bthhfsrv.dll',
'bthradiomedia.dll',
'bthsqm.dll',
'callbuttons.dll',
'callbuttons.proxystub.dll',
'certca.dll',
'cfmifs.dll',
'cfmifsproxy.dll',
'chartv.dll',
'clrhost.dll',
'cmdext.dll',
'cngcredui.dll',
'connectedaccountstate.dll',
'consentux.dll',
'cryptcatsvc.dll',
'cryptowinrt.dll',
'cryptuiwizard.dll',
'daconn.dll',
'dafbth.dll',
'dafprintprovider.dll',
'dafupnp.dll',
'dafwcn.dll',
'dafwfdprovider.dll',
'dafwsd.dll',
'damm.dll',
'daotpcredentialprovider.dll',
'das.dll',
'datusage.dll',
'ddp_ps.dll',
'ddpchunk.dll',
'ddptrace.dll',
'ddputils.dll',
'defaultdevicemanager.dll',
'defaultprinterprovider.dll',
'delegatorprovider.dll',
'devdispitemprovider.dll',
'deviceaccess.dll',
'devicedriverretrievalclient.dll',
'deviceelementsource.dll',
'devicemetadataretrievalclient.dll',
'devicesetupmanager.dll',
'devicesetupmanagerapi.dll',
'devicesetupstatusprovider.dll',
'devinv.dll',
'devpropmgr.dll',
'discan.dll',
'dismapi.dll',
'dlnashext.dll',
'dmvscres.dll',
'dot3mm.dll',
'dpapi.dll',
'dpapisrv.dll',
'dsui.dll',
'dxgwdi.dll',
'easconsent.dll',
'easinvoker.proxystub.dll',
'easwrt.dll',
'elshyph.dll',
'energyprov.dll',
'energytask.dll',
'ext-ms-win-advapi32-auth-l1-1-0.dll',
'ext-ms-win-advapi32-encryptedfile-l1-1-0.dll',
'ext-ms-win-advapi32-eventingcontroller-l1-1-0.dll',
'ext-ms-win-advapi32-eventlog-l1-1-0.dll',
'ext-ms-win-advapi32-lsa-l1-1-0.dll',
'ext-ms-win-advapi32-msi-l1-1-0.dll',
'ext-ms-win-advapi32-ntmarta-l1-1-0.dll',
'ext-ms-win-advapi32-psm-app-l1-1-0.dll',
'ext-ms-win-advapi32-registry-l1-1-0.dll',
'ext-ms-win-advapi32-safer-l1-1-0.dll',
'ext-ms-win-advapi32-shutdown-l1-1-0.dll',
'ext-ms-win-authz-claimpolicies-l1-1-0.dll',
'ext-ms-win-authz-context-l1-1-0.dll',
'ext-ms-win-authz-remote-l1-1-0.dll',
'ext-ms-win-biometrics-winbio-l1-1-0.dll',
'ext-ms-win-bluetooth-deviceassociation-l1-1-0.dll',
'ext-ms-win-branding-winbrand-l1-1-0.dll',
'ext-ms-win-cluster-clusapi-l1-1-0.dll',
'ext-ms-win-cluster-resutils-l1-1-0.dll',
'ext-ms-win-cmd-util-l1-1-0.dll',
'ext-ms-win-cng-rng-l1-1-0.dll',
'ext-ms-win-com-clbcatq-l1-1-0.dll',
'ext-ms-win-com-psmregister-l1-1-0.dll',
'ext-ms-win-domainjoin-netjoin-l1-1-0.dll',
'ext-ms-win-firewallapi-webproxy-l1-1-0.dll',
'ext-ms-win-fs-clfs-l1-1-0.dll',
'ext-ms-win-fsutilext-ifsutil-l1-1-0.dll',
'ext-ms-win-fsutilext-ulib-l1-1-0.dll',
'ext-ms-win-gdi-dc-l1-1-0.dll',
'ext-ms-win-gdi-devcaps-l1-1-0.dll',
'ext-ms-win-gdi-metafile-l1-1-0.dll',
'ext-ms-win-gdi-path-l1-1-0.dll',
'ext-ms-win-gdi-render-l1-1-0.dll',
'ext-ms-win-gdi-rgn-l1-1-0.dll',
'ext-ms-win-gdi-wcs-l1-1-0.dll',
'ext-ms-win-gpapi-grouppolicy-l1-1-0.dll',
'ext-ms-win-gui-uxinit-l1-1-0.dll',
'ext-ms-win-kernel32-appcompat-l1-1-0.dll',
'ext-ms-win-kernel32-datetime-l1-1-0.dll',
'ext-ms-win-kernel32-errorhandling-l1-1-0.dll',
'ext-ms-win-kernel32-file-l1-1-0.dll',
'ext-ms-win-kernel32-package-current-l1-1-0.dll',
'ext-ms-win-kernel32-registry-l1-1-0.dll',
'ext-ms-win-kernel32-sidebyside-l1-1-0.dll',
'ext-ms-win-kernel32-transacted-l1-1-0.dll',
'ext-ms-win-kernel32-windowserrorreporting-l1-1-0.dll',
'ext-ms-win-kernelbase-processthread-l1-1-0.dll',
'ext-ms-win-mf-winmm-l1-1-0.dll',
'ext-ms-win-mm-msacm-l1-1-0.dll',
'ext-ms-win-mm-pehelper-l1-1-0.dll',
'ext-ms-win-mm-wmdrmsdk-l1-1-0.dll',
'ext-ms-win-mpr-multipleproviderrouter-l1-1-0.dll',
'ext-ms-win-mrmcorer-resmanager-l1-1-0.dll',
'ext-ms-win-msiltcfg-msi-l1-1-0.dll',
'ext-ms-win-networking-winipsec-l1-1-0.dll',
'ext-ms-win-newdev-config-l1-1-0.dll',
'ext-ms-win-ntdsa-activedirectoryserver-l1-1-0.dll',
'ext-ms-win-ntdsapi-activedirectoryclient-l1-1-0.dll',
'ext-ms-win-ntos-ksecurity-l1-1-0.dll',
'ext-ms-win-ntos-ksigningpolicy-l1-1-0.dll',
'ext-ms-win-ntos-tm-l1-1-0.dll',
'ext-ms-win-ntuser-caret-l1-1-0.dll',
'ext-ms-win-ntuser-dc-access-ext-l1-1-0.dll',
'ext-ms-win-ntuser-menu-l1-1-0.dll',
'ext-ms-win-ntuser-mouse-l1-1-0.dll',
'ext-ms-win-ntuser-powermanagement-l1-1-0.dll',
'ext-ms-win-ntuser-private-l1-1-0.dll',
'ext-ms-win-ntuser-string-l1-1-0.dll',
'ext-ms-win-ntuser-sysparams-ext-l1-1-0.dll',
'ext-ms-win-ole32-clipboard-ie-l1-1-0.dll',
'ext-ms-win-ole32-ie-ext-l1-1-0.dll',
'ext-ms-win-ole32-oleautomation-l1-1-0.dll',
'ext-ms-win-printer-winspool-l1-1-0.dll',
'ext-ms-win-profile-profsvc-l1-1-0.dll',
'ext-ms-win-profile-userenv-l1-1-0.dll',
'ext-ms-win-ras-rasapi32-l1-1-0.dll',
'ext-ms-win-ras-rasdlg-l1-1-0.dll',
'ext-ms-win-ras-rasman-l1-1-0.dll',
'ext-ms-win-ras-tapi32-l1-1-0.dll',
'ext-ms-win-rometadata-dispenser-l1-1-0.dll',
'ext-ms-win-samsrv-accountstore-l1-1-0.dll',
'ext-ms-win-scesrv-server-l1-1-0.dll',
'ext-ms-win-secur32-translatename-l1-1-0.dll',
'ext-ms-win-security-cryptui-l1-1-0.dll',
'ext-ms-win-security-kerberos-l1-1-0.dll',
'ext-ms-win-security-vaultcli-l1-1-0.dll',
'ext-ms-win-session-userinit-l1-1-0.dll',
'ext-ms-win-session-wininit-l1-1-0.dll',
'ext-ms-win-setupapi-cfgmgr32local-l1-1-0.dll',
'ext-ms-win-setupapi-cfgmgr32remote-l1-1-0.dll',
'ext-ms-win-setupapi-classinstallers-l1-1-0.dll',
'ext-ms-win-setupapi-inf-l1-1-0.dll',
'ext-ms-win-setupapi-logging-l1-1-0.dll',
'ext-ms-win-shell-propsys-l1-1-0.dll',
'ext-ms-win-shell-shell32-l1-1-0.dll',
'ext-ms-win-shell-shlwapi-l1-1-0.dll',
'ext-ms-win-shell32-shellcom-l1-1-0.dll',
'ext-ms-win-smbshare-sscore-l1-1-0.dll',
'ext-ms-win-spinf-inf-l1-1-0.dll',
'ext-ms-win-sxs-oleautomation-l1-1-0.dll',
'ext-ms-win-umpoext-umpo-l1-1-0.dll',
'ext-ms-win-webio-pal-l1-1-0.dll',
'ext-ms-win-winhttp-pal-l1-1-0.dll',
'ext-ms-win-wininet-pal-l1-1-0.dll',
'ext-ms-win-wlan-grouppolicy-l1-1-0.dll',
'ext-ms-win-wlan-onexui-l1-1-0.dll',
'ext-ms-win-wlan-scard-l1-1-0.dll',
'ext-ms-win-wsclient-devlicense-l1-1-0.dll',
'ext-ms-win-wwan-wwapi-l1-1-0.dll',
'fddevquery.dll',
'fhautoplay.dll',
'fhcat.dll',
'fhcfg.dll',
'fhcleanup.dll',
'fhcpl.dll',
'fhengine.dll',
'fhevents.dll',
'fhlisten.dll',
'fhshl.dll',
'fhsrchapi.dll',
'fhsrchph.dll',
'fhsvc.dll',
'fhsvcctl.dll',
'fhtask.dll',
'fhuxadapter.dll',
'fhuxapi.dll',
'fhuxcommon.dll',
'fhuxgraphics.dll',
'fhuxpresentation.dll',
'fileappxstreamingdatasource.dll',
'fmapi.dll',
'fms.dll',
'frprov.dll',
'fsutilext.dll',
'fveskybackup.dll',
'gdiplus.dll',
'genuinecenter.dll',
'glcndfilter.dll',
'halextintclpiodma.dll',
'halextintcuartdma.dll',
'hotspotauth.dll',
'httpprxm.dll',
'httpprxp.dll',
'icsvc.dll',
'idctrls.dll',
'ieadvpack.dll',
'inputswitch.dll',
'ir32_32.dll',
'ir41_qc.dll',
'ir41_qcx.dll',
'ir50_32.dll',
'ir50_qc.dll',
'ir50_qcx.dll',
'iscsiwmiv2.dll',
'iuilp.dll',
'jscript9.dll',
'kbdarmph.dll',
'kbdarmty.dll',
'kbdcher.dll',
'kbdcherp.dll',
'kbdfar.dll',
'kbdgeome.dll',
'kbdgeooa.dll',
'kbdhaw.dll',
'kbdhebl3.dll',
'kbdinen.dll',
'kbdkni.dll',
'kbdkurd.dll',
'kbdlisub.dll',
'kbdlisus.dll',
'kbdmyan.dll',
'kbdnko.dll',
'kbdntl.dll',
'kbdogham.dll',
'kbdphags.dll',
'kbdrum.dll',
'kbdtaile.dll',
'kbdtifi.dll',
'kbdtifi2.dll',
'kd.dll',
'kd_02_10ec.dll',
'kd_02_14e4.dll',
'kd_02_8086.dll',
'kdhv1394.dll',
'kdnet.dll',
'kdscli.dll',
'kdstub.dll',
'kdvm.dll',
'keepaliveprovider.dll',
'livessp.dll',
'lldpnotify.dll',
'lscshostpolicy.dll',
'lsm.dll',
'maintenanceui.dll',
'mbaeapi.dll',
'mbaeapipublic.dll',
'mbaexmlparser.dll',
'mbsmsapi.dll',
'mbussdapi.dll',
'memorydiagnostic.dll',
'mfasfsrcsnk.dll',
'mfcaptureengine.dll',
'mfcore.dll',
'mfmediaengine.dll',
'mfmp4srcsnk.dll',
'mfmpeg2srcsnk.dll',
'mfnetcore.dll',
'mfnetsrc.dll',
'mfsrcsnk.dll',
'mfsvr.dll',
'mftranscode.dll',
'microsoft-windows-battery-events.dll',
'microsoft-windows-kernel-pnp-events.dll',
'microsoft-windows-pdc.dll',
'microsoft-windows-processor-aggregator-events.dll',
'migflt.dll',
'miutils.dll',
'mprext.dll',
'mrmcorer.dll',
'mrmindexer.dll',
'msauddecmft.dll',
'msched.dll',
'msidcrl40.dll',
'msiwer.dll',
'mskeyprotcli.dll',
'mskeyprotect.dll',
'mspatchc.dll',
'msspellcheckingfacility.dll',
'mstextprediction.dll',
'msvcp110_clr0400.dll',
'msvcr100_clr0400.dll',
'msvcr110_clr0400.dll',
'msvideodsp.dll',
'msvproc.dll',
'ncaapi.dll',
'ncasvc.dll',
'ncbservice.dll',
'ncdautosetup.dll',
'ncryptprov.dll',
'ncryptsslp.dll',
'ndisimplatform.dll',
'nduprov.dll',
'netprofmsvc.dll',
'netprovisionsp.dll',
'networkstatus.dll',
'nlmproxy.dll',
'ntasn1.dll',
'ntvdmcpl.dll',
'oemlicense.dll',
'osksupport.dll',
'packagestateroaming.dll',
'pcacli.dll',
'pcpksp.dll',
'pcptpm12.dll',
'peerdistad.dll',
'peerdistcleaner.dll',
'playlistfolder.dll',
'playtomanager.dll',
'playtostatusprovider.dll',
'pnppolicy.dll',
'printdialogs.dll',
'profext.dll',
'profsvcext.dll',
'provcore.dll',
'proximitycommon.dll',
'proximityservice.dll',
'prvdmofcomp.dll',
'psmodulediscoveryprovider.dll',
'psmsrv.dll',
'pstask.dll',
'purchasewindowslicense.dll',
'pwlauncher.dll',
'pwsso.dll',
'rdpcorets.dll',
'rdpudd.dll',
'rdsappxhelper.dll',
'rdsdwmdr.dll',
'rdvvmtransport.dll',
'removedevicecontexthandler.dll',
'removedeviceelevated.dll',
'reseteng.dll',
'resetengmig.dll',
'rfxvmt.dll',
'rmapi.dll',
'roamingsecurity.dll',
'rometadata.dll',
'rotmgr.dll',
'scavengeui.dll',
'sensorperformanceevents.dll',
'setnetworklocation.dll',
'settingmonitor.dll',
'settingsync.dll',
'settingsyncinfo.dll',
'shcore.dll',
'simauth.dll',
'simcfg.dll',
'smartcardsimulator.dll',
'smbwmiv2.dll',
'smsdeviceaccessrevocation.dll',
'smspace.dll',
'smsrouter.dll',
'sortwindows61.dll',
'spacecontrol.dll',
'spmpm.dll',
'sqlcecompact40.dll',
'sqlceoledb40.dll',
'sqlceqp40.dll',
'sqlcese40.dll',
'srevents.dll',
'srh.dll',
'srm.dll',
'srm_ps.dll',
'srmclient.dll',
'srmlib.dll',
'srmscan.dll',
'srmshell.dll',
'srmstormod.dll',
'srmtrace.dll',
'srumapi.dll',
'srumsvc.dll',
'sscoreext.dll',
'startupscan.dll',
'storagewmi.dll',
'storagewmi_passthru.dll',
'storewuauth.dll',
'storsvc.dll',
'subscriptionmgr.dll',
'svsvc.dll',
'systemeventsbrokerclient.dll',
'systemeventsbrokerserver.dll',
'threadpoolwinrt.dll',
'timebrokerclient.dll',
'timebrokerserver.dll',
'timesynctask.dll',
'tpmtasks.dll',
'tpmvsc.dll',
'tssrvlic.dll',
'tsusbgdcoinstaller.dll',
'tsusbredirectiongrouppolicyextension.dll',
'ttlsauth.dll',
'ttlscfg.dll',
'twinapi.dll',
'twinui.dll',
'uiautomationcoreres.dll',
'uireng.dll',
'umpoext.dll',
'umpowmi.dll',
'userinitext.dll',
'userlanguageprofilecallback.dll',
'userlanguagescpl.dll',
'ustprov.dll',
'vaultroaming.dll',
'vmapplicationhealthmonitorproxy.dll',
'vmrdvcore.dll',
'vscmgrps.dll',
'wcmapi.dll',
'wcmcsp.dll',
'wcmsvc.dll',
'wdfres.dll',
'webcamui.dll',
'websocket.dll',
'wfdprov.dll',
'windows.applicationmodel.background.systemeventsbroker.dll',
'windows.applicationmodel.background.timebroker.dll',
'windows.applicationmodel.dll',
'windows.applicationmodel.store.dll',
'windows.applicationmodel.store.testingframework.dll',
'windows.devices.enumeration.dll',
'windows.devices.enumeration.ps.dll',
'windows.devices.geolocation.dll',
'windows.devices.portable.dll',
'windows.devices.printers.extensions.dll',
'windows.devices.sensors.dll',
'windows.globalization.dll',
'windows.globalization.fontgroups.dll',
'windows.graphics.dll',
'windows.graphics.printing.dll',
'windows.help.runtime.dll',
'windows.immersiveshell.serviceprovider.dll',
'windows.media.devices.dll',
'windows.media.dll',
'windows.media.mediacontrol.dll',
'windows.media.renewal.dll',
'windows.media.streaming.dll',
'windows.media.streaming.ps.dll',
'windows.networking.backgroundtransfer.dll',
'windows.networking.connectivity.dll',
'windows.networking.dll',
'windows.networking.networkoperators.hotspotauthentication.dll',
'windows.networking.proximity.dll',
'windows.networking.sockets.pushenabledapplication.dll',
'windows.security.authentication.onlineid.dll',
'windows.security.credentials.ui.credentialpicker.dll',
'windows.storage.applicationdata.dll',
'windows.storage.compression.dll',
'windows.system.display.dll',
'windows.system.profile.hardwareid.dll',
'windows.system.remotedesktop.dll',
'windows.ui.dll',
'windows.ui.immersive.dll',
'windows.ui.input.inking.dll',
'windows.ui.xaml.dll',
'windows.web.dll',
'windowslivelogin.dll',
'wininitext.dll',
'winlangdb.dll',
'winmde.dll',
'winmmbase.dll',
'winmsoirmprotector.dll',
'winopcirmprotector.dll',
'winsku.dll',
'wintypes.dll',
'wisp.dll',
'witnesswmiv2provider.dll',
'wkspbrokerax.dll',
'wlanradiomanager.dll',
'wlidcli.dll',
'wlidcredprov.dll',
'wlidfdp.dll',
'wlidnsp.dll',
'wlidprov.dll',
'wlidres.dll',
'wlidsvc.dll',
'wlroamextension.dll',
'wmidcom.dll',
'wmitomi.dll',
'workerdd.dll',
'wpcwebsync.dll',
'wpnapps.dll',
'wpncore.dll',
'wpninprc.dll',
'wpnprv.dll',
'wpnsruprov.dll',
'wsclient.dll',
'wsmagent.dll',
'wsservice.dll',
'wsshared.dll',
'wssync.dll',
'wuaext.dll',
'wusettingsprovider.dll',
'wushareduxresources.dll',
'wwaapi.dll',
'wwanradiomanager.dll',
'xaudio2_8.dll',
'xinput1_4.dll',
])
WIN10_DLLS = frozenset([
'aadauthhelper.dll',
'aadcloudap.dll',
'aadtb.dll',
'abovelockapphost.dll',
'accountaccessor.dll',
'accountscontrolinternal.dll',
'accountsrt.dll',
'acmigration.dll',
'acpbackgroundmanagerpolicy.dll',
'activationclient.dll',
'activationmanager.dll',
'activesynccsp.dll',
'activesyncprovider.dll',
'addressparser.dll',
'advapi32res.dll',
'aeproam.dll',
'ajrouter.dll',
'amsi.dll',
'amsiproxy.dll',
'aphostclient.dll',
'aphostres.dll',
'aphostservice.dll',
'api-ms-win-core-file-l2-1-0.dll',
'api-ms-win-core-file-l2-1-1.dll',
'api-ms-win-core-heap-obsolete-l1-1-0.dll',
'api-ms-win-core-kernel32-private-l1-1-0.dll',
'api-ms-win-core-kernel32-private-l1-1-1.dll',
'api-ms-win-core-localization-obsolete-l1-2-0.dll',
'api-ms-win-core-string-l2-1-0.dll',
'api-ms-win-core-string-obsolete-l1-1-0.dll',
'api-ms-win-core-stringloader-l1-1-1.dll',
'api-ms-win-core-xstate-l2-1-0.dll',
'api-ms-win-devices-config-l1-1-0.dll',
'api-ms-win-devices-config-l1-1-1.dll',
'api-ms-win-eventing-classicprovider-l1-1-0.dll',
'api-ms-win-eventing-controller-l1-1-0.dll',
'api-ms-win-eventing-legacy-l1-1-0.dll',
'api-ms-win-eventing-provider-l1-1-0.dll',
'api-ms-win-eventlog-legacy-l1-1-0.dll',
'api-ms-win-security-lsalookup-l2-1-0.dll',
'api-ms-win-security-lsalookup-l2-1-1.dll',
'api-ms-win-security-lsapolicy-l1-1-0.dll',
'api-ms-win-security-provider-l1-1-0.dll',
'api-ms-win-service-private-l1-1-1.dll',
'appcapture.dll',
'appcontracts.dll',
'applicationframe.dll',
'applockercsp.dll',
'appointmentactivation.dll',
'appointmentapis.dll',
'appraiser.dll',
'appreadiness.dll',
'appwiz.cpl',
'appxapplicabilityblob.dll',
'appxapplicabilityengine.dll',
'atlthunk.dll',
'authbrokerui.dll',
'autoworkplacen.dll',
'azuresettingsyncprovider.dll',
'backgroundmediapolicy.dll',
'bcastdvr.proxy.dll',
'bdesysprep.dll',
'bingasds.dll',
'bingmaps.dll',
'bingonlineservices.dll',
'bitsproxy.dll',
'brokerlib.dll',
'browserbroker.dll',
'browsersettingsync.dll',
'bthprops.cpl',
'bthtelemetry.dll',
'c_gsm7.dll',
'callhistoryclient.dll',
'cameracaptureui.dll',
'capauthz.dll',
'castlaunch.dll',
'cbtbackgroundmanagerpolicy.dll',
'cdp.dll',
'cdpsvc.dll',
'cellularapi.dll',
'cemapi.dll',
'cfgsppolicy.dll',
'chakra.dll',
'chakradiag.dll',
'chatapis.dll',
'clipboardserver.dll',
'clipc.dll',
'clipsvc.dll',
'cloudap.dll',
'clouddomainjoinaug.dll',
'clouddomainjoindatamodelserver.dll',
'cloudexperiencehost.dll',
'cloudexperiencehostbroker.dll',
'cloudexperiencehostcommon.dll',
'cloudexperiencehostuser.dll',
'cmgrcspps.dll',
'cmintegrator.dll',
'coml2.dll',
'commstypehelperutil_ca.dll',
'comppkgsup.dll',
'configmanager2.dll',
'configurationclient.dll',
'configureexpandedstorage.dll',
'conhostv1.dll',
'conhostv2.dll',
'consolelogon.dll',
'contactactivation.dll',
'contactapis.dll',
'contactharvesterds.dll',
'contentdeliverymanager.utilities.dll',
'coredpus.dll',
'coremessaging.dll',
'coremmres.dll',
'coreuicomponents.dll',
'cortana.persona.dll',
'cortanamapihelper.dll',
'cortanamapihelper.proxystub.dll',
'courtesyengine.dll',
'credentialmigrationhandler.dll',
'credprovdatamodel.dll',
'credprovhost.dll',
'credprovs.dll',
'crypttpmeksvc.dll',
'csystemeventsbrokerclient.dll',
'd3d12.dll',
'd3dcompiler_47.dll',
'dab.dll',
'dabapi.dll',
'dafcdp.dll',
'dafdnssd.dll',
'dafdockingprovider.dll',
'dafpos.dll',
'dafwiprov.dll',
'damediamanager.dll',
'dataexchange.dll',
'datasensehandlers.dll',
'davsyncprovider.dll',
'dbgcore.dll',
'dbgmodel.dll',
'dcpapi.dll',
'dcpsvc.dll',
'dcpurapi.dll',
'ddds.dll',
'defragres.dll',
'desk.cpl',
'desktopshellext.dll',
'developeroptionssettingshandlers.dll',
'deviceregistration.dll',
'devicesflowbroker.dll',
'devquerybroker.dll',
'diagnosticlogcsp.dll',
'diagtrack.dll',
'diagtrack_win.dll',
'diagtrack_wininternal.dll',
'dialclient.dll',
'dialserver.dll',
'dictationmanager.dll',
'directmanipulation.dll',
'displaymanager.dll',
'dmapisetextimpl.dll',
'dmappsres.dll',
'dmcfgutils.dll',
'dmcommandlineutils.dll',
'dmcsps.dll',
'dmenrollengine.dll',
'dmiso8601utils.dll',
'dmoleaututils.dll',
'dmprocessxmlfiltered.dll',
'dmpushproxy.dll',
'dmpushroutercore.dll',
'dmrserver.dll',
'dmwappushsvc.dll',
'dmwmicsp.dll',
'dmxmlhelputils.dll',
'dockinterface.proxystub.dll',
'dolbydecmft.dll',
'domgmt.dll',
'dosvc.dll',
'dot3conn.dll',
'dsccore.dll',
'dsclient.dll',
'dscproxy.dll',
'dsctimer.dll',
'dssvc.dll',
'dwmghost.dll',
'dwminit.dll',
'eamprogresshandler.dll',
'eapprovp.dll',
'easpoliciesbroker.dll',
'easpoliciesbrokerps.dll',
'edgehtml.dll',
'editbuffertesthook.dll',
'editionupgradehelper.dll',
'edpauditapi.dll',
'edputil.dll',
'eeprov.dll',
'eeutil.dll',
'efsext.dll',
'efswrt.dll',
'emailapis.dll',
'embeddedapplauncherconfig.dll',
'enrollmentapi.dll',
'enrolluxdll.dll',
'enterpriseappmgmtclient.dll',
'enterpriseappmgmtsvc.dll',
'enterprisecsps.dll',
'enterprisedesktopappmgmtcsp.dll',
'enterpriseetw.dll',
'enterprisemodernappmgmtcsp.dll',
'enterpriseresourcemanager.dll',
'errordetails.dll',
'esdsip.dll',
'esevss.dll',
'ethernetmediamanager.dll',
'etweseproviderresources.dll',
'eventaggregation.dll',
'execmodelclient.dll',
'execmodelproxy.dll',
'exsmime.dll',
'extrasxmlparser.dll',
'family.authentication.dll',
'family.cache.dll',
'family.client.dll',
'family.syncengine.core.dll',
'familysafetyext.dll',
'faxprinterinstaller.dll',
'fhsettingsprovider.dll',
'fingerprintcredential.dll',
'fingerprintenrollment.dll',
'firewall.cpl',
'flightsettings.dll',
'fontglyphanimator.dll',
'fontgroupsoverride.dll',
'fontprovider.dll',
'fwbase.dll',
'fwpolicyiomgr.dll',
'gamingtcui.dll',
'generaltel.dll',
'geocommon.dll',
'geolocation.dll',
'geolocatorhelper.dll',
'globcollationhost.dll',
'globinputhost.dll',
'gnssadapter.dll',
'halextpl080.dll',
'hascsp.dll',
'hdwwiz.cpl',
'hevcdecoder.dll',
'hmkd.dll',
'hrtfapo.dll',
'httpprxc.dll',
'httpsdatasource.dll',
'ieetwcollectorres.dll',
'ieetwproxystub.dll',
'ieproxy.dll',
'ihvrilproxy.dll',
'implatsetup.dll',
'inetcpl.cpl',
'inkanalysis.dll',
'inkobjcore.dll',
'inproclogger.dll',
'inputinjectionbroker.dll',
'inputlocalemanager.dll',
'inputservice.dll',
'internetmail.dll',
'internetmailcsp.dll',
'intl.cpl',
'invagent.dll',
'iotassignedaccesslockframework.dll',
'ipeloggingdictationhelper.dll',
'iri.dll',
'irprops.cpl',
'javascriptcollectionagent.dll',
'joinproviderol.dll',
'joinutil.dll',
'joy.cpl',
'jpmapcontrol.dll',
'jscript9diag.dll',
'kbdazst.dll',
'kbdbug.dll',
'kbddzo.dll',
'kbdfthrk.dll',
'kbdgn.dll',
'kbdgthc.dll',
'kbdjav.dll',
'kbdlvst.dll',
'kbdmonst.dll',
'kbdolch.dll',
'kbdoldit.dll',
'kbdosm.dll',
'kbdsora.dll',
'kbdtiprd.dll',
'kbdtt102.dll',
'kbdtzm.dll',
'kd_0c_8086.dll',
'kd_02_10df.dll',
'kd_02_19a2.dll',
'kd_02_1969.dll',
'kd_07_1415.dll',
'kdnet_uart16550.dll',
'kerbclientshared.dll',
'kernel.appcore.dll',
'keyworddetectormsftsidadapter.dll',
'knobscore.dll',
'knobscsp.dll',
'languagecomponentsinstaller.dll',
'legacynetux.dll',
'lfsvc.dll',
'licensemanager.dll',
'licensemanagerapi.dll',
'licensemanagersvc.dll',
'licensingcsp.dll',
'locationcelladapter.dll',
'locationcrowdsource.dll',
'locationframework.dll',
'locationframeworkinternalps.dll',
'locationframeworkps.dll',
'locationgeofences.dll',
'locationpecell.dll',
'locationpecomposite.dll',
'locationpegnss.dll',
'locationpeip.dll',
'locationpelegacywinlocation.dll',
'locationpermissions.dll',
'locationpewifi.dll',
'locationsystemintegration.dll',
'locationwebproxy.dll',
'locationwifiadapter.dll',
'locationwinpalmisc.dll',
'lockappbroker.dll',
'lockscreencontent.dll',
'lockscreencontenthost.dll',
'logoncontroller.dll',
'main.cpl',
'mapconfiguration.dll',
'mapcontrolcore.dll',
'mapcontrolstringsres.dll',
'mapsbtsvc.dll',
'mapsbtsvcproxy.dll',
'mapsstore.dll',
'mapstoasttask.dll',
'mapsupdatetask.dll',
'mbmediamanager.dll',
'mccsengineshared.dll',
'mccspal.dll',
'mcrecvsrc.dll',
'mdmmigrator.dll',
'mdmregistration.dll',
'messagingdatamodel2.dll',
'mfh263enc.dll',
'mfh265enc.dll',
'mfmkvsrcsnk.dll',
'mfperfhelper.dll',
'mibincodec.dll',
'microsoft-windows-appmodelexecevents.dll',
'microsoft-windows-mapcontrols.dll',
'microsoft-windows-moshost.dll',
'microsoft-windows-mostrace.dll',
'microsoft-windows-sleepstudy-events.dll',
'microsoft-windows-storage-tiering-events.dll',
'microsoft-windows-system-events.dll',
'microsoft.management.infrastructure.native.unmanaged.dll',
'microsoftaccountcloudap.dll',
'microsoftaccountextension.dll',
'microsoftaccounttokenprovider.dll',
'mimofcodec.dll',
'miracastinputmgr.dll',
'miracastreceiver.dll',
'mispace.dll',
'mmsys.cpl',
'modernexecserver.dll',
'mos.dll',
'moshost.dll',
'moshostclient.dll',
'moshostcore.dll',
'mpeval.dll',
'mpunits.dll',
'mrt100.dll',
'mrt_map.dll',
'ms3dthumbnailprovider.dll',
'msajapi.dll',
'msalacdecoder.dll',
'msalacencoder.dll',
'msamrnbdecoder.dll',
'msamrnbencoder.dll',
'msamrnbsink.dll',
'msamrnbsource.dll',
'msauserext.dll',
'msctfuimanager.dll',
'msflacdecoder.dll',
'msflacencoder.dll',
'mshtmldac.dll',
'msphotography.dll',
'msvcp120_clr0400.dll',
'mswb7.dll',
'mtf.dll',
'mtfserver.dll',
'musdialoghandlers.dll',
'musupdatehandlers.dll',
'nativemap.dll',
'navshutdown.dll',
'ncpa.cpl',
'ncuprov.dll',
'ndisimplatformnetcfg.dll',
'netprovfw.dll',
'netsetupapi.dll',
'netsetupengine.dll',
'netsetupshim.dll',
'netsetupsvc.dll',
'networkbindingenginemigplugin.dll',
'networkcollectionagent.dll',
'networkdesktopsettings.dll',
'networkhelper.dll',
'networkmobilesettings.dll',
'netwphelper.dll',
'nfcprovisioningplugin.dll',
'nfcradiomedia.dll',
'ngccredprov.dll',
'ngcctnr.dll',
'ngcctnrgidshandler.dll',
'ngcctnrsvc.dll',
'ngckeyenum.dll',
'ngcksp.dll',
'ngcpopkeysrv.dll',
'ngcprocsp.dll',
'ngcsvc.dll',
'ngctasks.dll',
'nmaa.dll',
'notificationcontroller.dll',
'notificationcontrollerps.dll',
'notificationobjfactory.dll',
'notificationplatformcomponent.dll',
'npsmdesktopprovider.dll',
'ntlmshared.dll',
'oemlicense.dll',
'offlinelsa.dll',
'offlinesam.dll',
'offreg.dll',
'omadmagent.dll',
'omadmapi.dll',
'ondemandbrokerclient.dll',
'ondemandconnroutehelper.dll',
'onebackuphandler.dll',
'onedrivesettingsyncprovider.dll',
'pcsvdevice.dll',
'peerdistcacheprovider.dll',
'personax.dll',
'phonecallhistoryapis.dll',
'phoneom.dll',
'phoneplatformabstraction.dll',
'phoneservice.dll',
'phoneserviceres.dll',
'phoneutil.dll',
'phoneutilres.dll',
'pimindexmaintenance.dll',
'pimindexmaintenanceclient.dll',
'pimstore.dll',
'pinenrollment.dll',
'playtodevice.dll',
'playtomenu.dll',
'playtoreceiver.dll',
'ploptin.dll',
'pnpclean.dll',
'policymanagerprecheck.dll',
'posyncservices.dll',
'powercfg.cpl',
'prauthproviders.dll',
'printdialogs3d.dll',
'printplatformconfig.dll',
'printwsdahost.dll',
'prm0009.dll',
'provdatastore.dll',
'provengine.dll',
'provhandlers.dll',
'provisioningcsp.dll',
'provisioninghandlers.dll',
'provops.dll',
'provpackageapidll.dll',
'provplatformdesktop.dll',
'provplugineng.dll',
'proximitycommonpal.dll',
'proximityrtapipal.dll',
'proximityservicepal.dll',
'psmserviceexthost.dll',
'quickactionsdatamodel.dll',
'radcui.dll',
'raschapext.dll',
'rascustom.dll',
'rasmediamanager.dll',
'rastlsext.dll',
'rdbui.dll',
'rdpsaps.dll',
'rdvidcrl.dll',
'rdxservice.dll',
'rdxtaskfactory.dll',
'readingviewresources.dll',
'reagenttask.dll',
'reinfo.dll',
'remoteaudioendpoint.dll',
'remotenaturallanguage.dll',
'remotewipecsp.dll',
'removablemediaprovisioningplugin.dll',
'reportingcsp.dll',
'rilproxy.dll',
'rmsroamingsecurity.dll',
'rtmediaframe.dll',
'rtworkq.dll',
'sbservicetrigger.dll',
'scapi.dll',
'scdeviceenum.dll',
'search.protocolhandler.mapi2.dll',
'sebbackgroundmanagerpolicy.dll',
'sensorcustomadbalgorithm.dll',
'sensorservice.dll',
'sensorsnativeapi.dll',
'sensorsnativeapi.v2.dll',
'sensorsutilsv2.dll',
'setproxycredential.dll',
'settingsextensibilityhandlers.dll',
'settingshandlers_closedcaptioning.dll',
'settingshandlers_flashlight.dll',
'settingshandlers_geolocation.dll',
'settingshandlers_maps.dll',
'settingshandlers_notifications.dll',
'settingshandlers_nt.dll',
'settingshandlers_onecore_batterysaver.dll',
'settingshandlers_privacy.dll',
'settingshandlers_signinoptions.dll',
'settingshandlers_siuf.dll',
'settingshandlers_storagesense.dll',
'settingshandlers_useraccount.dll',
'settingsynccore.dll',
'settingsyncpolicy.dll',
'sharedstartmodelshim.dll',
'sharehost.dll',
'shutdownux.dll',
'slpts.dll',
'smphost.dll',
'smsroutersvc.dll',
'speechpal.dll',
'srhinproc.dll',
'staterepository.core.dll',
'storageusage.dll',
'storeagent.dll',
'suplcsps.dll',
'surfacehubhandlers.dll',
'synccontroller.dll',
'syncmlhook.dll',
'syncproxy.dll',
'syncres.dll',
'syncsettings.dll',
'syncutil.dll',
'sysdm.cpl',
'systemsettings.datamodel.dll',
'systemsettings.deviceencryptionhandlers.dll',
'systemsettings.handlers.dll',
'systemsettings.useraccountshandlers.dll',
'systemsettingsthresholdadminflowui.dll',
'tabletpc.cpl',
'tbauth.dll',
'telephon.cpl',
'tetheringclient.dll',
'tetheringconfigsp.dll',
'tetheringieprovider.dll',
'tetheringmgr.dll',
'tetheringservice.dll',
'tetheringstation.dll',
'textinputframework.dll',
'tileobjserver.dll',
'timedate.cpl',
'tokenbinding.dll',
'tokenbroker.dll',
'tokenbrokerui.dll',
'tpmcertresources.dll',
'tpmcoreprovisioning.dll',
'tssessionux.dll',
'ttlsext.dll',
'twinui.appcore.dll',
'tzsyncres.dll',
'ucrtbase.dll',
'umpo-overrides-base.dll',
'umpo-overrides-xpc.dll',
'unenrollhook.dll',
'unistore.dll',
'updatecsp.dll',
'updatehandlers.dll',
'updatepolicy.dll',
'userdataaccessres.dll',
'userdataaccountapis.dll',
'userdatalanguageutil.dll',
'userdataplatformhelperutil.dll',
'userdataservice.dll',
'userdatatimeutil.dll',
'userdatatypehelperutil.dll',
'userdeviceregistration.dll',
'userdeviceregistration.ngc.dll',
'usermgr.dll',
'usermgrcli.dll',
'usermgrproxy.dll',
'usoapi.dll',
'usocore.dll',
'uvoipbackgroundmanagerpolicy.dll',
'vboxd3d9wddm.dll',
'vcardparser.dll',
'vedatalayerhelpers.dll',
'veeventdispatcher.dll',
'vestoreeventhandlers.dll',
'voiceactivationmanager.dll',
'vpnv2csp.dll',
'walletbackgroundserviceproxy.dll',
'walletproxy.dll',
'walletservice.dll',
'wephostsvc.dll',
'weretw.dll',
'wificonfigsp.dll',
'wificonnapi.dll',
'wifidisplay.dll',
'wifinetworkmanager.dll',
'wifiprofilessettinghandler.dll',
'winbici.dll',
'winbiodatamodel.dll',
'winbioext.dll',
'windows.accountscontrol.dll',
'windows.applicationmodel.core.dll',
'windows.applicationmodel.lockscreen.dll',
'windows.applicationmodel.wallet.dll',
'windows.clouddomainjoinaug.proxystub.dll',
'windows.cortana.dll',
'windows.cortana.pal.desktop.dll',
'windows.cortana.proxystub.dll',
'windows.data.pdf.dll',
'windows.devices.alljoyn.dll',
'windows.devices.background.dll',
'windows.devices.background.ps.dll',
'windows.devices.bluetooth.dll',
'windows.devices.custom.dll',
'windows.devices.custom.ps.dll',
'windows.devices.humaninterfacedevice.dll',
'windows.devices.lights.dll',
'windows.devices.lowlevel.dll',
'windows.devices.midi.dll',
'windows.devices.perception.dll',
'windows.devices.picker.dll',
'windows.devices.pointofservice.dll',
'windows.devices.printers.dll',
'windows.devices.radios.dll',
'windows.devices.scanners.dll',
'windows.devices.serialcommunication.dll',
'windows.devices.smartcards.dll',
'windows.devices.usb.dll',
'windows.devices.wifi.dll',
'windows.devices.wifidirect.dll',
'windows.energy.dll',
'windows.gaming.input.dll',
'windows.gaming.preview.dll',
'windows.gaming.xboxlive.storage.dll',
'windows.graphics.printing.3d.dll',
'windows.internal.bluetooth.dll',
'windows.internal.management.dll',
'windows.internal.shell.broker.dll',
'windows.internal.ui.bioenrollment.proxystub.dll',
'windows.internal.ui.logon.proxystub.dll',
'windows.management.lockdown.dll',
'windows.management.provisioning.proxystub.dll',
'windows.management.workplace.workplacesettings.dll',
'windows.media.audio.dll',
'windows.media.backgroundmediaplayback.dll',
'windows.media.editing.dll',
'windows.media.faceanalysis.dll',
'windows.media.ocr.dll',
'windows.media.photo.import.dll',
'windows.media.playback.backgroundmediaplayer.dll',
'windows.media.playback.mediaplayer.dll',
'windows.media.playback.proxystub.dll',
'windows.media.protection.playready.dll',
'windows.media.speech.dll',
'windows.media.speech.uxres.dll',
'windows.networking.backgroundtransfer.backgroundmanagerpolicy.dll',
'windows.networking.backgroundtransfer.contentprefetchtask.dll',
'windows.networking.hostname.dll',
'windows.networking.servicediscovery.dnssd.dll',
'windows.networking.ux.eaprequesthandler.dll',
'windows.networking.ux.proxystub.dll',
'windows.networking.vpn.dll',
'windows.security.authentication.web.core.dll',
'windows.security.credentials.ui.userconsentverifier.dll',
'windows.shell.search.urihandler.dll',
'windows.shell.servicehostbuilder.dll',
'windows.speech.pal.dll',
'windows.staterepository.dll',
'windows.storage.dll',
'windows.storage.search.dll',
'windows.system.diagnostics.dll',
'windows.system.launcher.dll',
'windows.system.profile.retailinfo.dll',
'windows.system.profile.systemmanufacturers.dll',
'windows.system.systemmanagement.dll',
'windows.ui.biofeedback.dll',
'windows.ui.blockedshutdown.dll',
'windows.ui.core.textinput.dll',
'windows.ui.cred.dll',
'windows.ui.logon.dll',
'windows.ui.picturepassword.dll',
'windows.ui.search.dll',
'windows.ui.shell.dll',
'windows.ui.xaml.maps.dll',
'windows.ui.xaml.phone.dll',
'windows.ui.xaml.resources.dll',
'windows.web.diagnostics.dll',
'windows.web.http.dll',
'windows.xbox.networking.proxystub.dll',
'windowscodecsraw.dll',
'windowsperformancerecordercontrol.dll',
'wined3dwddm.dll',
'winipcfile.dll',
'winipcsecproc.dll',
'winipcsecproc_ssp.dll',
'winjson.dll',
'winlogonext.dll',
'winmsipc.dll',
'winnlsres.dll',
'winrttracing.dll',
'winsetupui.dll',
'wlanmediamanager.dll',
'wlansvcpal.dll',
'woftasks.dll',
'wofutil.dll',
'wordbreakers.dll',
'workfolderscontrol.dll',
'workfoldersgpext.dll',
'workfoldersres.dll',
'workfoldersshell.dll',
'workfolderssvc.dll',
'wpbcreds.dll',
'wpkbdlayout.dll',
'wpnservice.dll',
'wpportinglibrary.dll',
'wpprecorderum.dll',
'wptaskscheduler.dll',
'wpx.dll',
'wscui.cpl',
'wshhyperv.dll',
'wsp_fs.dll',
'wsp_health.dll',
'wsp_sr.dll',
'wsplib.dll',
'wuau.dll',
'wuautoappupdate.dll',
'wudfsmcclassext.dll',
'wudfx02000.dll',
'wuuhext.dll',
'wwaext.dll',
'xamldiagnostics.dll',
'xaudio2_9.dll',
'xblauthmanager.dll',
'xblauthmanagerproxy.dll',
'xblauthtokenbrokerext.dll',
'xblgamesave.dll',
'xblgamesaveproxy.dll',
'xboxnetapisvc.dll',
'xinputuap.dll',
'xpsdocumenttargetprint.dll',
'ztrace_ca.dll',
'ztrace_maps.dll',
])
# Fold the OS-version DLL name sets into the common import-library whitelist.
# NOTE(review): WIN10_DLLS is defined immediately above but is NOT included in
# this union — presumably merged elsewhere or intentionally excluded; confirm.
COMMON_IMPORT_LIBS = COMMON_IMPORT_LIBS | WIN7_DLLS | WIN8_DLLS
| none | 1 | 1.135288 | 1 | |
def extractBllovetranslationsWordpressCom(item):
    '''
    Parser for 'bllovetranslations.wordpress.com'

    Maps a feed item to a release message based on its tags, or rejects it.
    Returns None when the title has no volume/chapter info (or is a preview),
    a release message when a known tag matches, and False otherwise.
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # Items with neither a chapter nor a volume number, or preview posts,
    # carry no release information.
    if not (chp or vol) or "preview" in item['title'].lower():
        return None
    # (feed tag, canonical series name, translation type) triples.
    tagmap = [
        ('CDAW', 'Continuation of the Dream in Another World', 'translated'),
        ('aak', 'ai wo ataeru kemono-tachi', 'translated'),
        ('ai wo ataeru kemono-tachi', 'ai wo ataeru kemono-tachi', 'translated'),
        ('Sweet Daydream', 'Sweet Daydream', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tagname, name, tl_type in tagmap:
        if tagname in item['tags']:
            return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Has chapter/volume info but no recognized series tag.
    return False
src/retinaface/model.py | blurry-mood/NonMaxSuppression-with-Reinforcement-Learning | 3 | 6619110 | <gh_stars>1-10
""" This repository was taken from https://github.com/biubug6/Pytorch_Retinaface"""
import os
from .models.retina import Retina
from .config import cfg_mnet
import torch
def check_keys(model, pretrained_state_dict):
    """Assert that the checkpoint shares at least one key with the model.

    Raises AssertionError when no key of ``pretrained_state_dict`` matches a
    parameter name in ``model.state_dict()``; otherwise returns True.
    """
    checkpoint_keys = set(pretrained_state_dict.keys())
    expected_keys = set(model.state_dict().keys())
    shared_keys = expected_keys & checkpoint_keys
    assert len(shared_keys) > 0, 'load NONE from pretrained checkpoint'
    return True
def remove_prefix(state_dict, prefix):
''' Old style model is stored with all names of parameters sharing common prefix 'module.' '''
print('remove prefix \'{}\''.format(prefix))
def f(x): return x.split(prefix, 1)[-1] if x.startswith(prefix) else x
return {f(key): value for key, value in state_dict.items()}
def _load_model(model, pretrained_path,):
print('Loading pretrained model from {}'.format(pretrained_path))
pretrained_dict = torch.load(
pretrained_path, map_location=lambda storage, loc: storage)
if "state_dict" in pretrained_dict.keys():
pretrained_dict = remove_prefix(
pretrained_dict['state_dict'], 'module.')
else:
pretrained_dict = remove_prefix(pretrained_dict, 'module.')
check_keys(model, pretrained_dict)
model.load_state_dict(pretrained_dict, strict=False)
return model
def get_trained_model(width, height=None):
if height is None:
height = width
PATH = os.path.split(os.path.abspath(__file__))[0]
cfg = cfg_mnet
# net and model
# Update input image width & height as you wish
net = Retina(cfg=cfg, phase='test', width=width, height=height)
net = _load_model(net, os.path.join(PATH, 'weights', 'mobilenet0.25_Final.pth'))
net.eval()
return net | """ This repository was taken from https://github.com/biubug6/Pytorch_Retinaface"""
import os
from .models.retina import Retina
from .config import cfg_mnet
import torch
def check_keys(model, pretrained_state_dict):
    """Verify that a checkpoint shares at least one parameter key with *model*.

    Args:
        model: object exposing ``state_dict()`` (e.g. a ``torch.nn.Module``).
        pretrained_state_dict: mapping of parameter name -> tensor loaded
            from a checkpoint file.

    Returns:
        True when the key sets overlap.

    Raises:
        AssertionError: if no checkpoint key matches any model key
            (loading would silently do nothing with ``strict=False``).
    """
    ckpt_keys = set(pretrained_state_dict.keys())
    model_keys = set(model.state_dict().keys())
    used_pretrained_keys = model_keys & ckpt_keys
    # Explicit raise instead of a bare ``assert`` so the check still runs
    # under ``python -O`` (asserts are stripped with optimizations on).
    if not used_pretrained_keys:
        raise AssertionError('load NONE from pretrained checkpoint')
    return True
def remove_prefix(state_dict, prefix):
    """Return *state_dict* with *prefix* stripped from the front of each key.

    Checkpoints saved from a ``DataParallel``-wrapped model carry a
    ``module.`` prefix on every parameter name; this maps such keys back to
    the plain names a bare model expects. Keys that do not start with the
    prefix pass through unchanged.
    """
    print('remove prefix \'{}\''.format(prefix))
    cut = len(prefix)
    return {
        (key[cut:] if key.startswith(prefix) else key): value
        for key, value in state_dict.items()
    }
def _load_model(model, pretrained_path,):
    """Load checkpoint weights from *pretrained_path* into *model* in place.

    Handles both plain state dicts and checkpoints wrapped under a
    ``state_dict`` key, stripping any ``module.`` prefix left over from
    DataParallel training before loading.
    """
    print('Loading pretrained model from {}'.format(pretrained_path))
    # map_location returns the deserialized storage unchanged — presumably
    # keeps weights on CPU so loading works without the original GPU; confirm.
    pretrained_dict = torch.load(
        pretrained_path, map_location=lambda storage, loc: storage)
    if "state_dict" in pretrained_dict.keys():
        # checkpoint saved as {'state_dict': {...}, ...}
        pretrained_dict = remove_prefix(
            pretrained_dict['state_dict'], 'module.')
    else:
        pretrained_dict = remove_prefix(pretrained_dict, 'module.')
    # Raises AssertionError when checkpoint and model share no keys at all.
    check_keys(model, pretrained_dict)
    # strict=False: model parameters absent from the checkpoint are skipped.
    model.load_state_dict(pretrained_dict, strict=False)
    return model
def get_trained_model(width, height=None):
    """Build the Retina detector and load the bundled MobileNet-0.25 weights.

    Args:
        width: width of the input images the network graph is built for.
        height: input image height; defaults to ``width`` (square input).

    Returns:
        The network in eval mode, ready for inference.
    """
    if height is None:
        height = width
    # Resolve the package directory so the shipped weights are found
    # regardless of the current working directory.
    PATH = os.path.split(os.path.abspath(__file__))[0]
    cfg = cfg_mnet
    # Build the network graph for the requested input size.
    net = Retina(cfg=cfg, phase='test', width=width, height=height)
    net = _load_model(net, os.path.join(PATH, 'weights', 'mobilenet0.25_Final.pth'))
    net.eval()
    return net
tests/test_exporters.py | clemfromspace/scrapy-algolia-exporter | 1 | 6619111 | <gh_stars>1-10
"""This module contains the ``exporters`` related test cases"""
from unittest import TestCase
from unittest.mock import call, patch
from scrapy_algolia_exporter.exporters import AlgoliaItemExporter
class AlgoliaItemExporterTestCase(TestCase):
    """Test case for the ``AlgoliaItemExporter``"""
    def setUp(self):
        """Initialize the ``AlgoliaItemExporter``"""
        # Dummy credentials: the Algolia client is mocked in every test,
        # so nothing ever reaches the network.
        self.algolia_item_exporter = AlgoliaItemExporter(
            algolia_api_id='api_id',
            algolia_api_key='api_key',
            algolia_index_name='index_name'
        )
    def test_start_exporting_should_initialize_the_algolia_client(self):
        """Test that the ``start_exporting`` method should initialize the Algolia client"""
        with patch('algoliasearch.algoliasearch.Client') as mocked_algolia_client:
            self.algolia_item_exporter.start_exporting()
            # Client must be constructed with the credentials, then asked
            # for the configured index.
            mocked_algolia_client.assert_has_calls(
                [
                    call('api_id', 'api_key'),
                    call().init_index('index_name')
                ]
            )
            # Counters/queues start empty.
            self.assertEqual(self.algolia_item_exporter.next_items, [])
            self.assertEqual(self.algolia_item_exporter.exported_items_nbr, 0)
    def test_export_item_should_add_the_items_to_the_stack(self):
        """Test that the ``export_item`` method should add the item to the queue"""
        with patch('algoliasearch.algoliasearch.Client'):
            self.algolia_item_exporter.start_exporting()
            # At first, we don't have any items in the queue
            self.assertEqual(self.algolia_item_exporter.next_items, [])
            # Export an item
            self.algolia_item_exporter.export_item({'id': 1})
            # We should have a new item in the queue
            self.assertEqual(self.algolia_item_exporter.next_items, [{'id': 1}])
    def test_export_item_should_call_add_objects_if_algolia_item_bulk_nbr_is_reached(self):
        """Test that the ``export_item`` method should call ``add_objects`` if ``algolia_item_bulk_nbr`` is reached"""
        with patch('algoliasearch.algoliasearch.Client'):
            self.algolia_item_exporter.start_exporting()
            # Add 101 items — presumably one more than the bulk threshold
            # of 100, so exactly one flush happens; confirm threshold.
            items = [
                {'id': i}
                for i in range(1, 102)
            ]
            with patch.object(self.algolia_item_exporter.algolia_index, 'add_objects') as mocked:
                for item in items:
                    self.algolia_item_exporter.export_item(item)
                # The ``_export_items`` was called only once
                mocked.assert_called_once_with(items[:100])
                # The ``next_items`` must contains only 1 item
                self.assertEqual(self.algolia_item_exporter.next_items, [{'id': 101}])
| """This module contains the ``exporters`` related test cases"""
from unittest import TestCase
from unittest.mock import call, patch
from scrapy_algolia_exporter.exporters import AlgoliaItemExporter
class AlgoliaItemExporterTestCase(TestCase):
"""Test case for the ``AlgoliaItemExporter``"""
def setUp(self):
"""Initialize the ``AlgoliaItemExporter``"""
self.algolia_item_exporter = AlgoliaItemExporter(
algolia_api_id='api_id',
algolia_api_key='api_key',
algolia_index_name='index_name'
)
def test_start_exporting_should_initialize_the_algolia_client(self):
"""Test that the ``start_exporting`` method should initialize the Algolia client"""
with patch('algoliasearch.algoliasearch.Client') as mocked_algolia_client:
self.algolia_item_exporter.start_exporting()
mocked_algolia_client.assert_has_calls(
[
call('api_id', 'api_key'),
call().init_index('index_name')
]
)
self.assertEqual(self.algolia_item_exporter.next_items, [])
self.assertEqual(self.algolia_item_exporter.exported_items_nbr, 0)
def test_export_item_should_add_the_items_to_the_stack(self):
"""Test that the ``export_item`` method should add the item to the queue"""
with patch('algoliasearch.algoliasearch.Client'):
self.algolia_item_exporter.start_exporting()
# At first, we don't have any items in the queue
self.assertEqual(self.algolia_item_exporter.next_items, [])
# Export an item
self.algolia_item_exporter.export_item({'id': 1})
# We should have a new item in the queue
self.assertEqual(self.algolia_item_exporter.next_items, [{'id': 1}])
def test_export_item_should_call_add_objects_if_algolia_item_bulk_nbr_is_reached(self):
"""Test that the ``export_item`` method should call ``add_objects`` if ``algolia_item_bulk_nbr`` is reached"""
with patch('algoliasearch.algoliasearch.Client'):
self.algolia_item_exporter.start_exporting()
# Add 100 items
items = [
{'id': i}
for i in range(1, 102)
]
with patch.object(self.algolia_item_exporter.algolia_index, 'add_objects') as mocked:
for item in items:
self.algolia_item_exporter.export_item(item)
# The ``_export_items`` was called only once
mocked.assert_called_once_with(items[:100])
# The ``next_items`` must contains only 1 item
self.assertEqual(self.algolia_item_exporter.next_items, [{'id': 101}]) | en | 0.786751 | This module contains the ``exporters`` related test cases Test case for the ``AlgoliaItemExporter`` Initialize the ``AlgoliaItemExporter`` Test that the ``start_exporting`` method should initialize the Algolia client Test that the ``export_item`` method should add the item to the queue # At first, we don't have any items in the queue # Export an item # We should have a new item in the queue Test that the ``export_item`` method should call ``add_objects`` if ``algolia_item_bulk_nbr`` is reached # Add 100 items # The ``_export_items`` was called only once # The ``next_items`` must contains only 1 item | 2.783453 | 3 |
modal_2fa/admin_apps.py | jonesim/django-2fa | 0 | 6619112 | <gh_stars>0
from django.contrib.admin.apps import AdminConfig
class AdminConfig2fa(AdminConfig):
    """App config that swaps in a 2FA-aware Django admin site.

    NOTE(review): installing this config in place of ``django.contrib.admin``
    presumably makes Django instantiate ``AdminSite2FA`` as the default
    admin site — confirm against the project's INSTALLED_APPS.
    """
    # Not auto-selected as the app's default config; must be referenced
    # explicitly in settings.
    default = False
    # Dotted path to the admin-site class Django should instantiate.
    default_site = 'modal_2fa.admin.AdminSite2FA'
| from django.contrib.admin.apps import AdminConfig
class AdminConfig2fa(AdminConfig):
default = False
default_site = 'modal_2fa.admin.AdminSite2FA' | none | 1 | 1.254785 | 1 | |
streetteam/apps/events/migrations/0002_improve_field_name.py | alysivji/street-team | 2 | 6619113 | <reponame>alysivji/street-team<filename>streetteam/apps/events/migrations/0002_improve_field_name.py
# Generated by Django 3.0.6 on 2020-05-18 00:02
from django.db import migrations
class Migration(migrations.Migration):
    """Rename ``Event.happened_at`` to the more accurate ``happens_on``."""
    # Must apply after the migration that created the events table.
    dependencies = [("events", "0001_create_events_table")]
    operations = [migrations.RenameField(model_name="event", old_name="happened_at", new_name="happens_on")]
| # Generated by Django 3.0.6 on 2020-05-18 00:02
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("events", "0001_create_events_table")]
operations = [migrations.RenameField(model_name="event", old_name="happened_at", new_name="happens_on")] | en | 0.831153 | # Generated by Django 3.0.6 on 2020-05-18 00:02 | 1.878619 | 2 |
src/Zero-Node/FlaskServer/app.py | wiseby/Hackers-HomeAuto.Node | 0 | 6619114 | <reponame>wiseby/Hackers-HomeAuto.Node
from flask import Flask
app = Flask(__name__, static_folder='ClientApp/dist/InitConf', static_url_path="/")
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def catch_all(path):
    """Serve the SPA entry page for the root and any client-side route."""
    # Whatever path the browser asks for, hand back the front-end bundle's
    # index.html and let the client-side router resolve it.
    page = app.send_static_file("index.html")
    return page
@app.route('/api/config', methods=['POST'])
def postConfig():
    """Accept a configuration payload posted by the client.

    Bug fix: the original handler declared a ``config`` parameter, but the
    route has no matching URL variable, so Flask raised a TypeError on
    every request (and the missing return value produced a 500). The
    payload is now read from the request body instead.
    """
    from flask import request  # local import keeps this fix self-contained
    config = request.get_json(silent=True)  # None when body is not JSON
    print(config)
    # Acknowledge with an empty 204 so the view returns a valid response.
    return ('', 204)
if __name__ == '__main__':
    # Launch Flask's built-in development server when run directly.
    app.run()
| from flask import Flask
app = Flask(__name__, static_folder='ClientApp/dist/InitConf', static_url_path="/")
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def catch_all(path):
return app.send_static_file("index.html")
@app.route('/api/config')
def postConfig(config):
print(config)
if (__name__ == '__main__'):
app.run() | none | 1 | 2.166922 | 2 | |
my_university_api/application/api/mongodb/db.py | LittleBitProgrammer/myUniversity | 1 | 6619115 | import pymongo
from pymongo import MongoClient
CONNECTION_STRING = "mongodb+srv://dbMyUniversity:<EMAIL>/test?retryWrites=true&w=majority"
client = MongoClient(CONNECTION_STRING)
db = client.get_database('flask_mongodb_atlas')
user_collection = pymongo.collection.Collection(db, 'user_collection')
| import pymongo
from pymongo import MongoClient
CONNECTION_STRING = "mongodb+srv://dbMyUniversity:<EMAIL>/test?retryWrites=true&w=majority"
client = MongoClient(CONNECTION_STRING)
db = client.get_database('flask_mongodb_atlas')
user_collection = pymongo.collection.Collection(db, 'user_collection')
| none | 1 | 2.310875 | 2 | |
mtp_common/dates.py | ministryofjustice/money-to-prisoners-common | 7 | 6619116 | from govuk_bank_holidays import bank_holidays
class WorkdayChecker:
    """Working-day queries for England & Wales via govuk-bank-holidays."""

    def __init__(self):
        # One holiday calendar shared by all queries on this instance.
        self.holidays = bank_holidays.BankHolidays()

    def is_workday(self, date):
        """Whether *date* is a working day (delegates to ``is_work_day``)."""
        division = bank_holidays.BankHolidays.ENGLAND_AND_WALES
        return self.holidays.is_work_day(date, division=division)

    def get_next_workday(self, date):
        """The next working day after *date* (delegates to ``get_next_work_day``)."""
        division = bank_holidays.BankHolidays.ENGLAND_AND_WALES
        return self.holidays.get_next_work_day(date=date, division=division)

    def get_previous_workday(self, date):
        """The previous working day before *date* (delegates to ``get_prev_work_day``)."""
        division = bank_holidays.BankHolidays.ENGLAND_AND_WALES
        return self.holidays.get_prev_work_day(date=date, division=division)
| from govuk_bank_holidays import bank_holidays
class WorkdayChecker:
def __init__(self):
self.holidays = bank_holidays.BankHolidays()
def is_workday(self, date):
return self.holidays.is_work_day(date, division=bank_holidays.BankHolidays.ENGLAND_AND_WALES)
def get_next_workday(self, date):
return self.holidays.get_next_work_day(date=date, division=bank_holidays.BankHolidays.ENGLAND_AND_WALES)
def get_previous_workday(self, date):
return self.holidays.get_prev_work_day(date=date, division=bank_holidays.BankHolidays.ENGLAND_AND_WALES)
| none | 1 | 2.797126 | 3 | |
ABC/021/a.py | fumiyanll23/AtCoder | 0 | 6619117 | <reponame>fumiyanll23/AtCoder
def main():
    """Read N and print a shortest decomposition of N into 1s and 2s."""
    n = int(input())
    # Greedy: use N // 2 twos, plus a single 1 when N is odd.
    twos, ones = divmod(n, 2)
    terms = [1] * ones + [2] * twos
    print(len(terms))
    for term in terms:
        print(term)


if __name__ == '__main__':
    main()
| def main():
# input
N = int(input())
# compute
one = N % 2
two = N // 2
# output
print(one + two)
for _ in range(one):
print(1)
for _ in range(two):
print(2)
if __name__ == '__main__':
main() | en | 0.213219 | # input # compute # output | 3.792868 | 4 |
tests/unit/test_solvers/test_jax_bdf_solver.py | dalbamont/PyBaMM | 0 | 6619118 | <filename>tests/unit/test_solvers/test_jax_bdf_solver.py<gh_stars>0
import pybamm
import unittest
from tests import get_mesh_for_testing
import sys
import time
import numpy as np
from platform import system
@unittest.skipIf(system() == "Windows", "JAX not supported on windows")
class TestJaxBDFSolver(unittest.TestCase):
def test_solver(self):
# Create model
model = pybamm.BaseModel()
model.convert_to_format = "jax"
domain = ["negative electrode", "separator", "positive electrode"]
var = pybamm.Variable("var", domain=domain)
model.rhs = {var: 0.1 * var}
model.initial_conditions = {var: 1}
# No need to set parameters; can use base discretisation (no spatial operators)
# create discretisation
mesh = get_mesh_for_testing()
spatial_methods = {"macroscale": pybamm.FiniteVolume()}
disc = pybamm.Discretisation(mesh, spatial_methods)
disc.process_model(model)
# Solve
t_eval = np.linspace(0, 1, 80)
y0 = model.concatenated_initial_conditions.evaluate()
rhs = pybamm.EvaluatorJax(model.concatenated_rhs)
def fun(t, y, inputs):
return rhs.evaluate(t=t, y=y, inputs=inputs).reshape(-1)
t0 = time.perf_counter()
y, _ = pybamm.jax_bdf_integrate(
fun, y0, t_eval, inputs=None, rtol=1e-8, atol=1e-8)
t1 = time.perf_counter() - t0
# test accuracy
np.testing.assert_allclose(y[0, :], np.exp(0.1 * t_eval),
rtol=1e-7, atol=1e-7)
t0 = time.perf_counter()
y, _ = pybamm.jax_bdf_integrate(fun, y0, t_eval, rtol=1e-8, atol=1e-8)
t2 = time.perf_counter() - t0
# second run should be much quicker
self.assertLess(t2, t1)
# test second run is accurate
np.testing.assert_allclose(y[0, :], np.exp(0.1 * t_eval),
rtol=1e-7, atol=1e-7)
def test_solver_with_inputs(self):
# Create model
model = pybamm.BaseModel()
model.convert_to_format = "jax"
domain = ["negative electrode", "separator", "positive electrode"]
var = pybamm.Variable("var", domain=domain)
model.rhs = {var: -pybamm.InputParameter("rate") * var}
model.initial_conditions = {var: 1}
# create discretisation
mesh = get_mesh_for_testing()
spatial_methods = {"macroscale": pybamm.FiniteVolume()}
disc = pybamm.Discretisation(mesh, spatial_methods)
disc.process_model(model)
# Solve
t_eval = np.linspace(0, 10, 100)
y0 = model.concatenated_initial_conditions.evaluate()
rhs = pybamm.EvaluatorJax(model.concatenated_rhs)
def fun(t, y, inputs):
return rhs.evaluate(t=t, y=y, inputs=inputs).reshape(-1)
y, _ = pybamm.jax_bdf_integrate(fun, y0, t_eval, inputs={
"rate": 0.1}, rtol=1e-9, atol=1e-9)
np.testing.assert_allclose(y[0, :], np.exp(-0.1 * t_eval))
if __name__ == "__main__":
print("Add -v for more debug output")
if "-v" in sys.argv:
debug = True
pybamm.settings.debug_mode = True
unittest.main()
| <filename>tests/unit/test_solvers/test_jax_bdf_solver.py<gh_stars>0
import pybamm
import unittest
from tests import get_mesh_for_testing
import sys
import time
import numpy as np
from platform import system
@unittest.skipIf(system() == "Windows", "JAX not supported on windows")
class TestJaxBDFSolver(unittest.TestCase):
    """Unit tests for the JAX BDF integrator (``pybamm.jax_bdf_integrate``)."""
    def test_solver(self):
        """Exponential-growth ODE: check accuracy and repeat-call speed-up."""
        # Create model
        model = pybamm.BaseModel()
        model.convert_to_format = "jax"
        domain = ["negative electrode", "separator", "positive electrode"]
        var = pybamm.Variable("var", domain=domain)
        model.rhs = {var: 0.1 * var}
        model.initial_conditions = {var: 1}
        # No need to set parameters; can use base discretisation (no spatial operators)
        # create discretisation
        mesh = get_mesh_for_testing()
        spatial_methods = {"macroscale": pybamm.FiniteVolume()}
        disc = pybamm.Discretisation(mesh, spatial_methods)
        disc.process_model(model)
        # Solve
        t_eval = np.linspace(0, 1, 80)
        y0 = model.concatenated_initial_conditions.evaluate()
        rhs = pybamm.EvaluatorJax(model.concatenated_rhs)
        def fun(t, y, inputs):
            # RHS in the flat-vector form the integrator expects
            return rhs.evaluate(t=t, y=y, inputs=inputs).reshape(-1)
        t0 = time.perf_counter()
        y, _ = pybamm.jax_bdf_integrate(
            fun, y0, t_eval, inputs=None, rtol=1e-8, atol=1e-8)
        t1 = time.perf_counter() - t0
        # test accuracy (exact solution is exp(0.1 * t))
        np.testing.assert_allclose(y[0, :], np.exp(0.1 * t_eval),
                                   rtol=1e-7, atol=1e-7)
        t0 = time.perf_counter()
        y, _ = pybamm.jax_bdf_integrate(fun, y0, t_eval, rtol=1e-8, atol=1e-8)
        t2 = time.perf_counter() - t0
        # second run should be much quicker
        # (presumably because the compiled function is cached — confirm)
        self.assertLess(t2, t1)
        # test second run is accurate
        np.testing.assert_allclose(y[0, :], np.exp(0.1 * t_eval),
                                   rtol=1e-7, atol=1e-7)
    def test_solver_with_inputs(self):
        """Exponential-decay ODE whose rate arrives via the ``inputs`` dict."""
        # Create model
        model = pybamm.BaseModel()
        model.convert_to_format = "jax"
        domain = ["negative electrode", "separator", "positive electrode"]
        var = pybamm.Variable("var", domain=domain)
        model.rhs = {var: -pybamm.InputParameter("rate") * var}
        model.initial_conditions = {var: 1}
        # create discretisation
        mesh = get_mesh_for_testing()
        spatial_methods = {"macroscale": pybamm.FiniteVolume()}
        disc = pybamm.Discretisation(mesh, spatial_methods)
        disc.process_model(model)
        # Solve
        t_eval = np.linspace(0, 10, 100)
        y0 = model.concatenated_initial_conditions.evaluate()
        rhs = pybamm.EvaluatorJax(model.concatenated_rhs)
        def fun(t, y, inputs):
            return rhs.evaluate(t=t, y=y, inputs=inputs).reshape(-1)
        y, _ = pybamm.jax_bdf_integrate(fun, y0, t_eval, inputs={
            "rate": 0.1}, rtol=1e-9, atol=1e-9)
        # exact solution is exp(-rate * t)
        np.testing.assert_allclose(y[0, :], np.exp(-0.1 * t_eval))
if __name__ == "__main__":
    # Run the tests directly; "-v" also enables pybamm debug mode.
    print("Add -v for more debug output")
    if "-v" in sys.argv:
        debug = True  # NOTE(review): local flag is never read afterwards — confirm intent
        pybamm.settings.debug_mode = True
    unittest.main()
| en | 0.767979 | # Create model # No need to set parameters; can use base discretisation (no spatial operators) # create discretisation # Solve # test accuracy # second run should be much quicker # test second run is accurate # Create model # create discretisation # Solve | 2.202274 | 2 |
Python/dbscan_by_convolution.py | QuietIzm/ConDBSCAN | 0 | 6619119 | # coding:UTF-8
import numpy as np
import math
import cv2
import time
from collections import deque
color_list = np.array([[32, 178, 170], [240, 248, 255], [135, 206, 250], [138, 43, 226], [22, 184, 135], [165, 42, 42],
[60, 170, 113], [255, 140, 0], [173, 255, 47], [0, 128, 128], [166, 127, 120], [50, 67, 95],
[143, 134, 129], [225, 220, 217], [166, 127, 120], [145, 92, 76], [47, 24, 18], [64, 104, 106],
[250, 240, 230], [255, 228, 225], [100, 149, 237], [176, 196, 222], [46, 139, 87], [255, 174, 185],
[139, 58, 98], [193, 205, 205], [99, 184, 255], [141, 182, 205], [171, 130, 255], [139, 34, 82],
[139, 71, 93], [205, 179, 139], [255, 222, 173], [255, 99, 71], [205, 91, 69], [255, 165, 0], [139, 87, 66],
[205, 102, 29], [238, 154, 73], [139, 126, 102], [255, 231, 186], [238, 197, 145], [238, 121, 66],
[139, 101, 8], [238, 173, 14], [139, 105, 20], [238, 238, 0], [205, 205, 180], [205, 190, 112],
[162, 205, 90], [105, 139, 34], [192, 255, 62]], np.uint8)
class ConDBSCAN():
    '''Convolution-based DBSCAN for image segmentation.

    Core-point densities are computed with a separable 3-D box convolution
    over the RGB colour histogram, so clustering cost is linear in the
    number of distinct colours with constant extra state per colour.
    Intended for image segmentation only.
    '''

    def __init__(self, kernel, minpts):
        """
        kernel: 3-D numpy array; only its shape defines the box neighbourhood.
        minpts: density threshold for a colour to count as a core object.
        """
        self.channels = 3
        self.depth_bits = 8
        self.kernel = kernel  # convolution kernel, 3-D numpy array
        self.minpts = minpts  # density threshold
        # half-size of the kernel along each axis
        self.kernel_radius = [math.ceil((kernel.shape[i] - 1) / 2) for i in range(self.channels)]

    def fit(self, data, dealed):
        '''
        1. Fast 3-D convolution produces the density map ``convoluted_result``.
        2. Colours whose density exceeds ``minpts`` become core objects.
        3. BFS from each unvisited core object grows one cluster; results go
           into ``dealed`` (colour -> cluster tag) and ``labels`` (tag set).
        '''
        max_val = int(math.pow(2, self.depth_bits)) - 1
        convoluted_result = self.__quick_convolute_3d(data)
        core_objects = self.__get_core_objects(convoluted_result, data)
        print('核心对象个数: ' + str(len(core_objects)))
        visits = deque()
        labels = set()  # cluster tags produced so far
        tag = 0  # next cluster tag to assign
        for core in core_objects.keys():
            if core_objects[core] == 1:
                # already absorbed into an earlier cluster
                continue
            visits.append(core)
            core_objects[core] = 1
            last = (0, 0, 0)
            while len(visits) != 0:
                sample = visits.popleft()
                dealed[sample] = tag
                # expand only from core objects
                if core_objects.get(sample, -2) != -2:
                    # clamp the kernel-sized neighbourhood to the colour cube
                    start = [sample[i] - self.kernel.shape[i] // 2 for i in range(self.channels)]
                    start = [x if x >= 0 else 0 for x in start]
                    end = [sample[i] + self.kernel.shape[i] // 2 for i in range(self.channels)]
                    end = [x if x <= max_val else max_val for x in end]
                    for i in range(start[0], end[0] + 1):
                        for j in range(start[1], end[1] + 1):
                            for k in range(start[2], end[2] + 1):
                                # heuristic from the original code: skip cells
                                # dominated by the previously expanded sample
                                if i <= last[0] and j <= last[1] and k <= last[2]:
                                    continue
                                # an occurring colour not yet assigned a tag
                                if dealed.get((i, j, k), -2) == -1:
                                    if core_objects.get((i, j, k), -2) == -1:
                                        visits.append((i, j, k))
                                        core_objects[(i, j, k)] = 1
                                    dealed[(i, j, k)] = tag
                last = sample
            labels.add(tag)
            tag += 1
        return dealed, labels

    def __convolution_x(self, input, radius):
        '''
        Sliding box sum along axis 0 (window = 2*radius + 1, truncated at
        the borders).

        input: 2-D array
        radius: half window size along axis 0
        returns: 2-D int array of windowed sums, or -1 on bad size
        '''
        rows, cols = input.shape
        if rows < radius:
            print('error: kernel size is too big!')
            return -1
        # ``np.int`` was removed from NumPy; the builtin ``int`` is the
        # supported dtype spelling.
        local_sum = np.zeros((rows, cols), dtype=int)
        # initialise row 0 with the first (truncated) window sum
        for i in range(radius + 1):
            for j in range(cols):
                local_sum[0, j] += input[i, j]
        for i in range(1, rows):
            if i <= radius:
                # window still growing at the top border
                for j in range(cols):
                    local_sum[i, j] = local_sum[i - 1, j] + input[i + radius, j]
            elif i < rows - radius:
                # full window: add the entering row, drop the leaving row
                for j in range(cols):
                    local_sum[i, j] = local_sum[i - 1, j] + input[i + radius, j] - input[i - radius - 1, j]
            else:
                # window shrinking at the bottom border
                for j in range(cols):
                    local_sum[i, j] = local_sum[i - 1, j] - input[i - radius - 1, j]
        return local_sum

    def __convolution_y(self, input):
        '''
        Sliding box sum along axis 1, mirroring ``__convolution_x``.

        Bug fixes vs. the original: columns iterate from 1 (not from the
        row index), and the right border is detected against ``cols``
        rather than ``rows``.

        input: 2-D array
        returns: 2-D int array of windowed sums, or -1 on bad size
        '''
        radius_y = self.kernel_radius[1]  # half window size along axis 1
        rows, cols = input.shape
        if cols < radius_y:
            print('error: kernel size is too big!')
            return -1
        local_sum = np.zeros((rows, cols), dtype=int)
        # initialise column 0 with the first (truncated) window sum
        for i in range(rows):
            for j in range(radius_y + 1):
                local_sum[i, 0] += input[i, j]
        for i in range(rows):
            for j in range(1, cols):
                if j <= radius_y:
                    # window still growing at the left border
                    local_sum[i, j] = local_sum[i, j - 1] + input[i, j + radius_y]
                elif j < cols - radius_y:
                    # full window: add the entering column, drop the leaving one
                    local_sum[i, j] = local_sum[i, j - 1] + input[i, j + radius_y] - input[i, j - radius_y - 1]
                else:
                    # window shrinking at the right border
                    local_sum[i, j] = local_sum[i, j - 1] - input[i, j - radius_y - 1]
        return local_sum

    def __quick_convolute_2d(self, input):
        '''
        2-D box convolution: an X pass followed by a Y pass (the two
        passes commute).

        input: 2-D array
        returns: 2-D array of box sums
        '''
        local_sum = self.__convolution_x(input, self.kernel_radius[0])
        c2d_result = self.__convolution_y(local_sum)
        return c2d_result

    def __quick_convolute_3d(self, data):
        '''
        Separable 3-D box convolution.

        data: 3-D colour-occupancy matrix
        returns: 3-D int array of neighbourhood counts
        '''
        r, c, h = data.shape
        c2d_result = np.zeros((r, c, h), dtype=int)
        # 2-D convolution of every layer along the first axis
        for i in range(r):
            c2d_result[i, :, :] = self.__quick_convolute_2d(data[i, :, :])
        # then convolve across layers to complete the 3-D sum
        c3d_result = np.zeros((r, c, h), dtype=int)
        for j in range(c):
            c3d_result[:, j, :] = self.__convolution_x(c2d_result[:, j, :], self.kernel_radius[2])
        return c3d_result

    def __get_core_objects(self, convoluted_result, data):
        '''
        Select core objects: occurring colours whose neighbourhood density
        exceeds ``minpts``.

        convoluted_result: 3-D density map from the box convolution
        data: 3-D colour-occupancy matrix of the image
        returns: dict mapping core colour (b, g, r) -> -1 (unvisited)
        '''
        core_objects = {}
        max_val = int(math.pow(2, self.depth_bits))
        # full scan of the colour cube (2**depth_bits per axis)
        for i in range(max_val):
            for j in range(max_val):
                for k in range(max_val):
                    if convoluted_result[i, j, k] > self.minpts and data[i, j, k] != 0:
                        core_objects[(i, j, k)] = -1
        return core_objects

    def image_to_3DMatrix(self, img):
        '''
        Map the image into RGB colour space.

        img: input image (OpenCV BGR channel order)
        returns:
            color_matrix: 3-D occupancy matrix (1 where the colour occurs)
            dealed: dict mapping each occurring colour (b, g, r) -> -1
        '''
        max_val = int(math.pow(2, self.depth_bits))
        color_matrix = np.zeros((max_val, max_val, max_val))
        dealed = {}
        rows, cols, channels = img.shape
        for i in range(rows):
            for j in range(cols):
                b, g, r = img[i, j]
                color_matrix[b, g, r] = 1
                dealed[(b, g, r)] = -1
        return color_matrix, dealed

    def cluster_to_single_image(self, img, clusters):
        '''Render the segmentation: paint each pixel with its cluster's colour.'''
        rows, cols, channels = img.shape
        segmented_image = np.ones(img.shape, np.uint8) * 155
        for r in range(rows):
            for c in range(cols):
                color = tuple(img[r, c])
                label = clusters[color]
                # look the tag up in the module-level palette
                segmented_image[r, c] = color_list[label]
        return segmented_image
def my_show(win_name, src):
    """Show image *src* in a resizable OpenCV window titled *win_name*."""
    # WINDOW_NORMAL allows manual resizing (the default flag auto-sizes).
    cv2.namedWindow(win_name, cv2.WINDOW_NORMAL)
    cv2.imshow(win_name, src)
if __name__ == '__main__':
    '''
    功能测试,入口函数
    '''
    # Functional smoke test / entry point (string above kept verbatim).
    start = time.time()
    # img = cv2.imread(r'E:\MachineLearning\ImageSegmentation_Cluster\capsule_images\origin\capsule_1.bmp')
    # NOTE(review): hard-coded absolute Windows path — parameterise before reuse.
    img = cv2.imread(r'E:\MachineLearning\ImageSegmentation_Cluster\capsule_images\xdpi\capsule_2.bmp')
    # kernel shape (3, 7, 9) sets the colour-space neighbourhood; minpts = 60
    conDB = ConDBSCAN(np.ones((3, 7, 9)), 60)
    data, dealed = conDB.image_to_3DMatrix(img)
    print('数据集大小: ' + str(len(dealed)))
    clusters, labels = conDB.fit(data, dealed)
    end = time.time()
    print('程序运行时间:%f s' % (end - start))
    print('分类簇个数:%d' % len(labels))
    seg_img = conDB.cluster_to_single_image(img, clusters)
    my_show('segementation', seg_img)
    cv2.waitKey(0)
| # coding:UTF-8
import numpy as np
import math
import cv2
import time
from collections import deque
color_list = np.array([[32, 178, 170], [240, 248, 255], [135, 206, 250], [138, 43, 226], [22, 184, 135], [165, 42, 42],
[60, 170, 113], [255, 140, 0], [173, 255, 47], [0, 128, 128], [166, 127, 120], [50, 67, 95],
[143, 134, 129], [225, 220, 217], [166, 127, 120], [145, 92, 76], [47, 24, 18], [64, 104, 106],
[250, 240, 230], [255, 228, 225], [100, 149, 237], [176, 196, 222], [46, 139, 87], [255, 174, 185],
[139, 58, 98], [193, 205, 205], [99, 184, 255], [141, 182, 205], [171, 130, 255], [139, 34, 82],
[139, 71, 93], [205, 179, 139], [255, 222, 173], [255, 99, 71], [205, 91, 69], [255, 165, 0], [139, 87, 66],
[205, 102, 29], [238, 154, 73], [139, 126, 102], [255, 231, 186], [238, 197, 145], [238, 121, 66],
[139, 101, 8], [238, 173, 14], [139, 105, 20], [238, 238, 0], [205, 205, 180], [205, 190, 112],
[162, 205, 90], [105, 139, 34], [192, 255, 62]], np.uint8)
class ConDBSCAN():
'''基于卷积实现的DBSCAN算法,时间复杂度O(n),空间复杂度O(1),仅用于图像分割'''
def __init__(self, kernel, minpts):
self.channels = 3
self.depth_bits = 8
self.kernel = kernel # 卷积核,三维np数组
self.minpts = minpts # 阈值参数
self.kernel_radius = [math.ceil((kernel.shape[i] - 1) / 2) for i in range(self.channels)] # 卷积核半径
def fit(self, data, dealed):
'''
1. 快速三维卷积计算核心对象得到 convoluted_result
2. 通过阈值参数 minpts 筛选 convoluted_result 中的核心对象,保存在 core_objects 中
3. 迭代生成聚类簇,聚类结果保存在 dealed 中,簇标签保存在 labels 中
'''
max_val = int(math.pow(2, self.depth_bits)) - 1
convoluted_result = self.__quick_convolute_3d(data)
core_objects = self.__get_core_objects(convoluted_result, data)
print('核心对象个数: ' + str(len(core_objects)))
visits = deque()
labels = set() # 保存簇标签
tag = 0 # 簇标识
for core in core_objects.keys():
if core_objects[core] == 1:
continue
visits.append(core)
core_objects[core] = 1
last = (0, 0, 0)
while len(visits) != 0:
sample = visits.popleft()
dealed[sample] = tag
if core_objects.get(sample, -2) != -2:
start = [sample[i] - self.kernel.shape[i] // 2 for i in range(self.channels)]
start = [x if x >= 0 else 0 for x in start]
end = [sample[i] + self.kernel.shape[i] // 2 for i in range(self.channels)]
end = [x if x <= max_val else max_val for x in end]
for i in range(start[0], end[0] + 1):
for j in range(start[1], end[1] + 1):
for k in range(start[2], end[2] + 1):
if i <= last[0] and j <= last[1] and k <= last[2]:
continue
# 如果是样本点,并且之前未曾标记
if dealed.get((i, j, k), -2) == -1:
if core_objects.get((i, j, k), -2) == -1:
visits.append((i, j, k))
core_objects[(i, j, k)] = 1
# print(str((i, j, k)) + ' ' + str(tag))
dealed[(i, j, k)] = tag
# print('%d %d %d = %d' % (i, j, k, tag))
last = sample
labels.add(tag)
tag += 1
return dealed, labels
def __convolution_x(self, input, radius):
'''
* description: X方向卷积
* input:
input: 输入矩阵(二维数组)
radius: 卷积核尺寸
* output:
local_sum: X方向卷积结果(二维数组)
'''
rows, cols = input.shape
if rows < radius:
print('error: kernel size is too biger!')
return -1
local_sum = np.zeros((rows, cols), dtype=np.int)
# 初始化,计算第0行local_sum_0
for i in range(radius + 1):
for j in range(cols):
local_sum[0, j] += input[i, j]
for i in range(1, rows):
if i <= radius:
for j in range(cols):
local_sum[i, j] = local_sum[i - 1, j] + input[i + radius, j]
elif i < rows - radius:
for j in range(cols):
local_sum[i, j] = local_sum[i - 1, j] + input[i + radius, j] - input[i - radius - 1, j]
else:
for j in range(cols):
local_sum[i, j] = local_sum[i - 1, j] - input[i - radius - 1, j]
return local_sum
def __convolution_y(self, input):
'''
* description: Y方向卷积
* input:
input: 输入矩阵(二维数组)
* output:
local_sum: Y方向卷积结果(二维数组)
'''
radius_y = self.kernel_radius[1] # Y方向卷积核尺寸
rows, cols = input.shape
if rows < radius_y:
print('error: kernel size is too biger!')
return -1
local_sum = np.zeros((rows, cols), dtype=np.int)
# 初始化,计算第0行local_sum_0
for i in range(rows):
for j in range(radius_y + 1):
local_sum[i, 0] += input[i, j]
for i in range(rows):
for j in range(i, cols):
if j <= radius_y:
local_sum[i, j] = local_sum[i, j - 1] + input[i, j + radius_y]
elif j < rows - radius_y:
local_sum[i, j] = local_sum[i, j - 1] + input[i, j + radius_y] - input[i, j - radius_y - 1]
else:
local_sum[i, j] = local_sum[i, j - 1] - input[i, j - radius_y - 1]
return local_sum
def __quick_convolute_2d(self, input):
'''
* description: 二维卷积,先进行X方向卷积,再进行Y方向卷积(调换方向结果相同)
* input:
input: 输入矩阵
* output:
result: 二维卷积结果,是一个二维数组
'''
local_sum = self.__convolution_x(input, self.kernel_radius[0])
c2d_result = self.__convolution_y(local_sum)
return c2d_result
def __quick_convolute_3d(self, data):
'''
* description: 基于线性规划优化三维卷积
* input:
data: 表示图像颜色信息的三维矩阵
* output:
c3d_result: 三维卷积结果,是一个三维数组
'''
r, c, h = data.shape
c2d_result = np.zeros((r, c, h), dtype=np.int)
# 首先对每一层进行二维卷积
for i in range(r):
c2d_result[i, :, :] = self.__quick_convolute_2d(data[i, :, :])
# 对二维卷积后的矩阵换一个维度再次进行卷积,得到三维卷积结果
c3d_result = np.zeros((r, c, h), dtype=np.int)
for j in range(c):
c3d_result[:, j, :] = self.__convolution_x(c2d_result[:, j, :], self.kernel_radius[2])
return c3d_result
def __get_core_objects(self, convoluted_result, data):
'''
* description: 根据阈值参数 minpts 筛选核心对象
* input:
convoluted_result: 三维卷积结果
data: 表示图像颜色信息的三维矩阵
* output:
core_objects: 保存核心对象的字典
'''
core_objects = {} # 保存核心对象
max_val = int(math.pow(2, self.depth_bits))
for i in range(max_val):
for j in range(max_val):
for k in range(max_val):
if convoluted_result[i, j, k] > self.minpts and data[i, j, k] != 0:
core_objects[(i, j, k)] = -1
return core_objects
def image_to_3DMatrix(self, img):
'''
* description: 将图像映射向RGB颜色空间
* input:
img: 输入图像
* output:
color_matrix: 表示图像颜色信息的三维矩阵
color_list: 图像中包含的颜色列表
'''
max_val = int(math.pow(2, self.depth_bits))
color_matrix = np.zeros((max_val, max_val, max_val))
dealed = {}
rows, cols, channels = img.shape
for i in range(rows):
for j in range(cols):
b, g, r = img[i, j]
color_matrix[b, g, r] = 1
dealed[(b, g, r)] = -1
return color_matrix, dealed
def cluster_to_single_image(self, img, clusters):
'''在一张图像中显示分割结果'''
rows, cols, channels = img.shape
segmented_image = np.ones(img.shape, np.uint8) * 155
for r in range(rows):
for c in range(cols):
color = tuple(img[r, c])
label = clusters[color]
segmented_image[r, c] = color_list[label]
return segmented_image
def my_show(win_name, src):
    """Display *src* in a user-resizable OpenCV window titled *win_name*."""
    cv2.namedWindow(win_name, cv2.WINDOW_NORMAL)
    cv2.imshow(win_name, src)
if __name__ == '__main__':
    '''
    Functional test, entry point.
    '''
    # Smoke test: segment a sample image with the convolution-based DBSCAN.
    start = time.time()
    # img = cv2.imread(r'E:\MachineLearning\ImageSegmentation_Cluster\capsule_images\origin\capsule_1.bmp')
    img = cv2.imread(r'E:\MachineLearning\ImageSegmentation_Cluster\capsule_images\xdpi\capsule_2.bmp')
    # Kernel of ones sized 3x7x9 and density threshold minpts = 60.
    # NOTE(review): ConDBSCAN is defined earlier in this file (not shown here).
    conDB = ConDBSCAN(np.ones((3, 7, 9)), 60)
    # Map the image into colour space, then cluster the occupied colours.
    data, dealed = conDB.image_to_3DMatrix(img)
    print('数据集大小: ' + str(len(dealed)))
    clusters, labels = conDB.fit(data, dealed)
    end = time.time()
    print('程序运行时间:%f s' % (end - start))
    print('分类簇个数:%d' % len(labels))
    # Render and display the segmentation result.
    seg_img = conDB.cluster_to_single_image(img, clusters)
    my_show('segementation', seg_img)
    cv2.waitKey(0)
| zh | 0.868278 | # coding:UTF-8 基于卷积实现的DBSCAN算法,时间复杂度O(n),空间复杂度O(1),仅用于图像分割 # 卷积核,三维np数组 # 阈值参数 # 卷积核半径 1. 快速三维卷积计算核心对象得到 convoluted_result 2. 通过阈值参数 minpts 筛选 convoluted_result 中的核心对象,保存在 core_objects 中 3. 迭代生成聚类簇,聚类结果保存在 dealed 中,簇标签保存在 labels 中 # 保存簇标签 # 簇标识 # 如果是样本点,并且之前未曾标记 # print(str((i, j, k)) + ' ' + str(tag)) # print('%d %d %d = %d' % (i, j, k, tag)) * description: X方向卷积 * input: input: 输入矩阵(二维数组) radius: 卷积核尺寸 * output: local_sum: X方向卷积结果(二维数组) # 初始化,计算第0行local_sum_0 * description: Y方向卷积 * input: input: 输入矩阵(二维数组) * output: local_sum: Y方向卷积结果(二维数组) # Y方向卷积核尺寸 # 初始化,计算第0行local_sum_0 * description: 二维卷积,先进行X方向卷积,再进行Y方向卷积(调换方向结果相同) * input: input: 输入矩阵 * output: result: 二维卷积结果,是一个二维数组 * description: 基于线性规划优化三维卷积 * input: data: 表示图像颜色信息的三维矩阵 * output: c3d_result: 三维卷积结果,是一个三维数组 # 首先对每一层进行二维卷积 # 对二维卷积后的矩阵换一个维度再次进行卷积,得到三维卷积结果 * description: 根据阈值参数 minpts 筛选核心对象 * input: convoluted_result: 三维卷积结果 data: 表示图像颜色信息的三维矩阵 * output: core_objects: 保存核心对象的字典 # 保存核心对象 * description: 将图像映射向RGB颜色空间 * input: img: 输入图像 * output: color_matrix: 表示图像颜色信息的三维矩阵 color_list: 图像中包含的颜色列表 在一张图像中显示分割结果 功能测试,入口函数 # img = cv2.imread(r'E:\MachineLearning\ImageSegmentation_Cluster\capsule_images\origin\capsule_1.bmp') | 2.397054 | 2 |
tests/test_core/test_algebraic/test_converter.py | Aubhro/chess_py | 14 | 6619120 | import unittest
from chess_py import converter, Board, Move, Location, color, notation_const
from chess_py import Pawn, Knight, Queen, Rook
class TestConverter(unittest.TestCase):
    """Tests for converter's parsing of short and incomplete algebraic notation."""
    def setUp(self):
        # Fresh default board plus a reference "e4" pawn push for each test.
        self.test_board = Board.init_default()
        self.e_four_move = Move(end_loc=Location.from_string("e4"),
                                piece=Pawn(color.white, Location.from_string("e4")),
                                status=notation_const.MOVEMENT,
                                start_loc=Location.from_string("e2"))
    def test_short_alg(self):
        # Short algebraic "e4" resolves to the reference pawn push.
        self.assertEqual(converter.short_alg("e4", color.white, self.test_board), self.e_four_move)
    def test_incomplete_alg_pawn_movement(self):
        # Plain pawn move: destination square alone identifies the move.
        self.assertEqual(
            converter.incomplete_alg("e4", color.white, self.test_board),
            Move(
                end_loc=Location.from_string("e4"),
                piece=Pawn(color.white, Location.from_string("e4")),
                status=notation_const.MOVEMENT,
                start_loc=Location.from_string("e2")
            )
        )
    def test_incomplete_alg_piece_movement(self):
        # Piece move with leading piece letter ("Nf3").
        self.assertEqual(
            converter.incomplete_alg("Nf3", color.white, self.test_board),
            Move(
                end_loc=Location.from_string("f3"),
                piece=Knight(color.white, Location.from_string("f3")),
                status=notation_const.MOVEMENT,
                start_loc=Location.from_string("g1")
            )
        )
    def test_incomplete_alg_pawn_capture(self):
        # Set up 1. e4 d5 so "exd5" is a legal pawn capture.
        self.test_board.update(converter.short_alg("e4", color.white, self.test_board))
        self.test_board.update(converter.short_alg("d5", color.black, self.test_board))
        self.assertEqual(
            converter.incomplete_alg("exd5", color.white, self.test_board),
            Move(
                end_loc=Location.from_string("d5"),
                piece=Pawn(color.white, Location.from_string("e4")),
                status=notation_const.CAPTURE,
                start_loc=Location.from_string("e4")
            )
        )
    def test_incomplete_alg_piece_capture(self):
        # Set up 1. Nf3 e5 so "Nxe5" is a legal knight capture.
        self.test_board.update(converter.short_alg("Nf3", color.white, self.test_board))
        self.test_board.update(converter.short_alg("e5", color.black, self.test_board))
        self.assertEqual(
            converter.incomplete_alg("Nxe5", color.white, self.test_board),
            Move(
                end_loc=Location.from_string("e5"),
                piece=Knight(color.white, Location.from_string("f3")),
                status=notation_const.CAPTURE,
                start_loc=Location.from_string("f3")
            )
        )
    def test_incomplete_alg_pawn_promotion(self):
        # Teleport the a-pawn to a7 and clear a8 so "a8=Q" is legal.
        self.test_board.move_piece(Location.from_string("a2"), Location.from_string("a7"))
        self.test_board.remove_piece_at_square(Location.from_string("a8"))
        self.assertEqual(
            converter.incomplete_alg("a8=Q", color.white, self.test_board),
            Move(
                end_loc=Location.from_string("a8"),
                # NOTE(review): the piece location "e7" looks inconsistent with
                # start_loc "a7" — presumably Move equality ignores the piece's
                # stored location; confirm against Move.__eq__.
                piece=Pawn(color.white, Location.from_string("e7")),
                status=notation_const.PROMOTE,
                promoted_to_piece=Queen,
                start_loc=Location.from_string("a7")
            )
        )
    def test_incomplete_alg_piece_movement_with_file_specified(self):
        # Disambiguating file written before the piece letter ("gNf3").
        self.assertEqual(
            converter.incomplete_alg("gNf3", color.white, self.test_board),
            Move(
                end_loc=Location.from_string("f3"),
                piece=Knight(color.white, Location.from_string("g1")),
                status=notation_const.MOVEMENT,
                start_loc=Location.from_string("g1")
            )
        )
    def test_incomplete_alg_piece_movement_with_file_specified_alt(self):
        # Standard disambiguation form with file after the piece letter ("Ngf3").
        self.assertEqual(
            converter.incomplete_alg("Ngf3", color.white, self.test_board),
            Move(
                end_loc=Location.from_string("f3"),
                piece=Knight(color.white, Location.from_string("g1")),
                status=notation_const.MOVEMENT,
                start_loc=Location.from_string("g1")
            )
        )
    def test_incomplete_alg_piece_movement_with_rank_and_file_specified(self):
        # Full origin square disambiguation ("e1Nf3").
        self.assertEqual(
            converter.incomplete_alg("e1Nf3", color.white, self.test_board),
            Move(
                end_loc=Location.from_string("f3"),
                piece=Knight(color.white, Location.from_string("e1")),
                status=notation_const.MOVEMENT,
                start_loc=Location.from_string("e1")
            )
        )
    def test_incomplete_alg_pawn_promotion_with_capture(self):
        # Pawn on a7 captures on b8 and promotes to a rook ("axb8=R").
        self.test_board.move_piece(Location.from_string("a2"), Location.from_string("a7"))
        self.assertEqual(
            converter.incomplete_alg("axb8=R", color.white, self.test_board),
            Move(
                end_loc=Location.from_string("b8"),
                piece=Pawn(color.white, Location.from_string("a7")),
                status=notation_const.CAPTURE_AND_PROMOTE,
                promoted_to_piece=Rook,
                start_loc=Location.from_string("a7")
            )
        )
| import unittest
from chess_py import converter, Board, Move, Location, color, notation_const
from chess_py import Pawn, Knight, Queen, Rook
class TestConverter(unittest.TestCase):
def setUp(self):
self.test_board = Board.init_default()
self.e_four_move = Move(end_loc=Location.from_string("e4"),
piece=Pawn(color.white, Location.from_string("e4")),
status=notation_const.MOVEMENT,
start_loc=Location.from_string("e2"))
def test_short_alg(self):
self.assertEqual(converter.short_alg("e4", color.white, self.test_board), self.e_four_move)
def test_incomplete_alg_pawn_movement(self):
self.assertEqual(
converter.incomplete_alg("e4", color.white, self.test_board),
Move(
end_loc=Location.from_string("e4"),
piece=Pawn(color.white, Location.from_string("e4")),
status=notation_const.MOVEMENT,
start_loc=Location.from_string("e2")
)
)
def test_incomplete_alg_piece_movement(self):
self.assertEqual(
converter.incomplete_alg("Nf3", color.white, self.test_board),
Move(
end_loc=Location.from_string("f3"),
piece=Knight(color.white, Location.from_string("f3")),
status=notation_const.MOVEMENT,
start_loc=Location.from_string("g1")
)
)
def test_incomplete_alg_pawn_capture(self):
self.test_board.update(converter.short_alg("e4", color.white, self.test_board))
self.test_board.update(converter.short_alg("d5", color.black, self.test_board))
self.assertEqual(
converter.incomplete_alg("exd5", color.white, self.test_board),
Move(
end_loc=Location.from_string("d5"),
piece=Pawn(color.white, Location.from_string("e4")),
status=notation_const.CAPTURE,
start_loc=Location.from_string("e4")
)
)
def test_incomplete_alg_piece_capture(self):
self.test_board.update(converter.short_alg("Nf3", color.white, self.test_board))
self.test_board.update(converter.short_alg("e5", color.black, self.test_board))
self.assertEqual(
converter.incomplete_alg("Nxe5", color.white, self.test_board),
Move(
end_loc=Location.from_string("e5"),
piece=Knight(color.white, Location.from_string("f3")),
status=notation_const.CAPTURE,
start_loc=Location.from_string("f3")
)
)
def test_incomplete_alg_pawn_promotion(self):
self.test_board.move_piece(Location.from_string("a2"), Location.from_string("a7"))
self.test_board.remove_piece_at_square(Location.from_string("a8"))
self.assertEqual(
converter.incomplete_alg("a8=Q", color.white, self.test_board),
Move(
end_loc=Location.from_string("a8"),
piece=Pawn(color.white, Location.from_string("e7")),
status=notation_const.PROMOTE,
promoted_to_piece=Queen,
start_loc=Location.from_string("a7")
)
)
def test_incomplete_alg_piece_movement_with_file_specified(self):
self.assertEqual(
converter.incomplete_alg("gNf3", color.white, self.test_board),
Move(
end_loc=Location.from_string("f3"),
piece=Knight(color.white, Location.from_string("g1")),
status=notation_const.MOVEMENT,
start_loc=Location.from_string("g1")
)
)
def test_incomplete_alg_piece_movement_with_file_specified_alt(self):
self.assertEqual(
converter.incomplete_alg("Ngf3", color.white, self.test_board),
Move(
end_loc=Location.from_string("f3"),
piece=Knight(color.white, Location.from_string("g1")),
status=notation_const.MOVEMENT,
start_loc=Location.from_string("g1")
)
)
def test_incomplete_alg_piece_movement_with_rank_and_file_specified(self):
self.assertEqual(
converter.incomplete_alg("e1Nf3", color.white, self.test_board),
Move(
end_loc=Location.from_string("f3"),
piece=Knight(color.white, Location.from_string("e1")),
status=notation_const.MOVEMENT,
start_loc=Location.from_string("e1")
)
)
def test_incomplete_alg_pawn_promotion_with_capture(self):
self.test_board.move_piece(Location.from_string("a2"), Location.from_string("a7"))
self.assertEqual(
converter.incomplete_alg("axb8=R", color.white, self.test_board),
Move(
end_loc=Location.from_string("b8"),
piece=Pawn(color.white, Location.from_string("a7")),
status=notation_const.CAPTURE_AND_PROMOTE,
promoted_to_piece=Rook,
start_loc=Location.from_string("a7")
)
)
| none | 1 | 3.210014 | 3 | |
documentation/sphinx/examples_tracer.py | arthus701/algopy | 54 | 6619121 | <filename>documentation/sphinx/examples_tracer.py
from algopy import CGraph, Function
# Trace the computational graph of z = x * y + x using algopy.
cg = CGraph()
cg.trace_on()
x = Function(1)
y = Function(3)
z = x * y + x
cg.trace_off()
# Declare the graph's inputs and outputs so it can be evaluated later.
cg.independentFunctionList = [x,y]
cg.dependentFunctionList = [z]
print(cg)
# Render the traced graph to an image file.
cg.plot('example_tracer_cgraph.png')
from algopy import CGraph, Function
cg = CGraph()
cg.trace_on()
x = Function(1)
y = Function(3)
z = x * y + x
cg.trace_off()
cg.independentFunctionList = [x,y]
cg.dependentFunctionList = [z]
print(cg)
cg.plot('example_tracer_cgraph.png') | none | 1 | 2.179951 | 2 | |
notebooks/icos_jupyter_notebooks/tools/math/roundfuncs.py | ICOS-Carbon-Portal/jupyter | 6 | 6619122 | <reponame>ICOS-Carbon-Portal/jupyter
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Description: Functions that take a numeric variable as input
and return the nearest integer by ±10, ±20 or ±100.
"""
__author__ = ["<NAME>"]
__credits__ = "ICOS Carbon Portal"
__license__ = "GPL-3.0"
__version__ = "0.1.0"
__maintainer__ = "ICOS Carbon Portal, elaborated products team"
__email__ = ['<EMAIL>', '<EMAIL>']
__date__ = "2020-10-13"
############################## round down 10 ############################
def rounddown_10(x):
    """
    Return the multiple of 10 strictly below ceil(x / 10) * 10.

    Non-multiples are floored to the multiple of 10 below them (25 -> 20);
    exact multiples step down one full ten (20 -> 10).

    :param x: numeric value (int or float)
    :return: int, or None (with a printed message) for non-numeric input
    """
    import math
    import numbers
    # ``isinstance`` already yields a bool; comparing against True is redundant.
    if isinstance(x, numbers.Number):
        return int(math.ceil(x / 10.0)) * 10 - 10
    # Preserve the original best-effort behaviour: report and return None.
    print("Input parameter is not numeric!")
############################################################################
def roundup_10(x):
    """
    Round *x* up to the nearest multiple of 10 (25 -> 30, 20 -> 20).

    :param x: numeric value (int or float)
    :return: int, or None (with a printed message) for non-numeric input
    """
    import math
    import numbers
    if isinstance(x, numbers.Number):
        return int(math.ceil(x / 10.0)) * 10
    # Preserve the original best-effort behaviour: report and return None.
    print("Input parameter is not numeric!")
############################################################################
############################### round down 20 ##############################
def rounddown_20(x):
    """
    Step *x* down onto a multiple of 20 using the original truncation rules.

    The value is first truncated toward zero to a multiple of 10; if the
    resulting tens digit is even the result steps down 20, otherwise 10, so
    the result always lands on a multiple of 20 below the truncation.

    :param x: numeric value (int or float)
    :return: int, or None (with a printed message) for non-numeric input
    """
    import numbers
    # The original's ``import math`` was unused and has been dropped.
    if isinstance(x, numbers.Number):
        if int(x / 10.0) % 2 == 0:
            return (int(x / 10.0) * 10) - 20
        return (int(x / 10.0) * 10) - 10
    # Preserve the original best-effort behaviour: report and return None.
    print("Input parameter is not numeric!")
############################################################################
def roundup_20(x):
    """
    Round *x* up to a multiple of 20, following the original branch rules.

    Exact non-negative multiples of 20 step up a full 20; other values are
    rounded up according to the parity of their truncated tens digit.

    :param x: numeric value (int or float)
    :return: int, or None (with a printed message) for non-numeric input
    """
    import math
    import numbers
    if isinstance(x, numbers.Number):
        # Non-negative exact multiples of 20 (including 0) step to the next one.
        if (x >= 0) and (((x / 10.0) % 20) % 2 == 0):
            return int(math.ceil(x / 10.0)) * 10 + 20
        # Positive values whose truncated tens digit is even.
        if (x > 0) and (int(x / 10.0) % 2 == 0):
            return int(math.ceil(x / 10.0)) * 10 + 10
        # Any value whose truncated tens digit is odd.
        if int(x / 10.0) % 2 != 0:
            return int(x / 10.0) * 10 + 10
        # Negative values below -10 whose integer part is even.
        if (x < -10) and (int(x) % 2 == 0):
            return int(x / 10.0) * 10 + 20
        return 0
    # Preserve the original best-effort behaviour: report and return None.
    print("Input parameter is not numeric!")
############################################################################
############################### round down 100 #############################
def rounddown_100(x):
    """
    Step *x* down onto a multiple of 100 using the original rules.

    Even multiples of 100, non-positive values, and exactly 100 step down a
    full hundred; every other value truncates to the multiple of 100 below.

    :param x: numeric value (int or float)
    :return: int, or None (with a printed message) for non-numeric input
    """
    import numbers
    if isinstance(x, numbers.Number):
        if ((x / 100.0) % 2 == 0) or (x <= 0) or (x == 100):
            return (int(x / 100.0) * 100) - 100
        return int(x / 100.0) * 100
    # Preserve the original best-effort behaviour: report and return None.
    print("Input parameter is not numeric!")
##############################################################################
def roundup_100(x):
    """
    Round *x* up to a multiple of 100, following the original rules.

    Even multiples of 100 and exactly +/-100 step up to the next multiple;
    every other value simply rounds up to the multiple of 100 above it.

    :param x: numeric value (int or float)
    :return: int, or None (with a printed message) for non-numeric input
    """
    import math
    import numbers
    if isinstance(x, numbers.Number):
        if ((x / 100.0) % 2 == 0) or (x == 100) or (x == -100):
            return int(math.ceil(x / 100.0)) * 100 + 100
        return int(math.ceil(x / 100.0)) * 100
    # Preserve the original best-effort behaviour: report and return None.
    print("Input parameter is not numeric!")
############################################################################## | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Description: Functions that take a numeric variable as input
and return the nearest integer by ±10, ±20 or ±100.
"""
__author__ = ["<NAME>"]
__credits__ = "ICOS Carbon Portal"
__license__ = "GPL-3.0"
__version__ = "0.1.0"
__maintainer__ = "ICOS Carbon Portal, elaborated products team"
__email__ = ['<EMAIL>', '<EMAIL>']
__date__ = "2020-10-13"
############################## round down 10 ############################
def rounddown_10(x):
"""
Project: 'ICOS Carbon Portal'
Created: Tue May 07 10:30:00 2019
Last Changed: Tue May 07 10:30:00 2019
Version: 1.0.0
Author(s): Karolina
Description: Function that takes a number as input and
floors it down to the closest "10".
Input parameters: Number (var_name: 'x', var_type: Integer or Float)
Output: Float
"""
#import module:
import math
import numbers
#Check if input parameter is numeric:
if(isinstance(x, numbers.Number)==True):
#Return rounded value:
return int(math.ceil(x / 10.0)) * 10 -10
#If input parameter is NOT numeric, prompt an error message:
else:
print("Input parameter is not numeric!")
############################################################################
def roundup_10(x):
"""
Project: 'ICOS Carbon Portal'
Created: Tue May 07 10:30:00 2018
Last Changed: Tue May 07 10:30:00 2019
Version: 1.0.0
Author(s): Karolina
Description: Function that takes a number as input and
rounds it up to the closest "10".
Input parameters: Number (var_name: 'x', var_type: Integer or Float)
Output: Float
"""
#import module:
import math
import numbers
#Check if input parameter is numeric:
if(isinstance(x, numbers.Number)==True):
#Return rounded value:
return int(math.ceil(x / 10.0)) * 10
#If input parameter is NOT numeric, prompt an error message:
else:
print("Input parameter is not numeric!")
############################################################################
############################### round down 20 ##############################
def rounddown_20(x):
"""
Project: 'ICOS Carbon Portal'
Created: Tue May 07 09:00:00 2019
Last Changed: Tue May 07 09:00:00 2019
Version: 1.0.0
Author(s): Karolina
Description: Function that takes a number as input and
floors it to the nearest "20".
Input parameters: Number (var_name: 'x', var_type: Integer or Float)
Output: Float
"""
#Import module:
import math
import numbers
#Check if input parameter is numeric:
if(isinstance(x, numbers.Number)==True):
#If the 2nd digit from the decimal point is an even number:
if(int(x/10.0)%2==0):
return(int(x / 10.0) * 10) - 20
#If the 2nd digit from the decimal point is an odd number:
else:
return(int(x / 10.0) * 10) - 10
#If input parameter is not numeric, prompt an error message:
else:
print("Input parameter is not numeric!")
############################################################################
def roundup_20(x):
"""
Project: 'ICOS Carbon Portal'
Created: Tue May 07 09:00:00 2019
Last Changed: Tue May 07 09:00:00 2019
Version: 1.0.0
Author(s): Karolina
Description: Function that takes a number as input and
rounds it up to the closest "20".
Input parameters: Number (var_name: 'x', var_type: Integer or Float)
Output: Float
"""
#Import module:
import math
import numbers
#Check if input parameter is numeric:
if(isinstance(x, numbers.Number)==True):
#for positive numbers, multiples of 20.0:
if((x>=0)&(((x/10.0)%20)%2 == 0)):
return int(math.ceil(x / 10.0)) * 10 +20
#for positive numbers with an even number as 2nd digit:
elif((x>0)&(int(x/10.0)%2==0)):
return int(math.ceil(x / 10.0)) * 10 +10
#for positive and negative numbers, whose 2nd digit
#is an odd number (except for i in [-1,-9]):
elif(int(x/10.0)%2!=0):
return int((x / 10.0)) * 10 +10
#for negative numbers, whose 1st or 2nd digit is an even number:
elif((x<-10) & (int(x)%2==0)):
return int((x / 10.0)) * 10 +20
else:
return 0
#If input parameter is NOT numeric, prompt an error message:
else:
print("Input parameter is not numeric!")
############################################################################
############################### round down 100 #############################
def rounddown_100(x):
"""
Project: 'ICOS Carbon Portal'
Created: Tue May 07 09:00:00 2019
Last Changed: Tue May 07 09:00:00 2019
Version: 1.0.0
Author(s): Karolina
Description: Function that takes a number as input and
floors it to the nearest "100".
Input parameters: Number (var_name: 'x', var_type: Integer or Float)
Output: Float
"""
#Import module:
import numbers
#Check if input parameter is numeric:
if(isinstance(x, numbers.Number)==True):
#If the number is an integral multiple of 100:
if(((x/100.0)%2==0) or (x<=0) or (x==100)):
return(int(x / 100.0) * 100) - 100
#If the input number is NOT an integral multiple of 100:
else:
return(int(x / 100.0) * 100)
#If input parameter is not numeric, prompt an error message:
else:
print("Input parameter is not numeric!")
##############################################################################
def roundup_100(x):
"""
Project: 'ICOS Carbon Portal'
Created: Tue May 07 09:00:00 2019
Last Changed: Tue May 07 09:00:00 2019
Version: 1.0.0
Author(s): Karolina
Description: Function that takes a number as input and
rounds it up to the nearest "100".
Input parameters: Number (var_name: 'x', var_type: Integer or Float)
Output: Float
"""
#Import modules:
import math
import numbers
#Check if input parameter is numeric:
if(isinstance(x, numbers.Number)==True):
#for integral mulitples of 100 and for the
#special cases of 100, 0 and -100:
if(((x/100.0)%2==0) or (x==100) or (x==-100)):
return int(math.ceil(x / 100.0)) * 100 + 100
else:
return int(math.ceil(x / 100.0)) * 100
#If input parameter is not numeric, prompt an error message:
else:
print("Input parameter is not numeric!")
############################################################################## | en | 0.41365 | #!/usr/bin/env python # -*- coding: utf-8 -*- Description: Functions that take a numeric variable as input and return the nearest integer by ±10, ±20 or ±100. ############################## round down 10 ############################ Project: 'ICOS Carbon Portal' Created: Tue May 07 10:30:00 2019 Last Changed: Tue May 07 10:30:00 2019 Version: 1.0.0 Author(s): Karolina Description: Function that takes a number as input and floors it down to the closest "10". Input parameters: Number (var_name: 'x', var_type: Integer or Float) Output: Float #import module: #Check if input parameter is numeric: #Return rounded value: #If input parameter is NOT numeric, prompt an error message: ############################################################################ Project: 'ICOS Carbon Portal' Created: Tue May 07 10:30:00 2018 Last Changed: Tue May 07 10:30:00 2019 Version: 1.0.0 Author(s): Karolina Description: Function that takes a number as input and rounds it up to the closest "10". Input parameters: Number (var_name: 'x', var_type: Integer or Float) Output: Float #import module: #Check if input parameter is numeric: #Return rounded value: #If input parameter is NOT numeric, prompt an error message: ############################################################################ ############################### round down 20 ############################## Project: 'ICOS Carbon Portal' Created: Tue May 07 09:00:00 2019 Last Changed: Tue May 07 09:00:00 2019 Version: 1.0.0 Author(s): Karolina Description: Function that takes a number as input and floors it to the nearest "20". 
Input parameters: Number (var_name: 'x', var_type: Integer or Float) Output: Float #Import module: #Check if input parameter is numeric: #If the 2nd digit from the decimal point is an even number: #If the 2nd digit from the decimal point is an odd number: #If input parameter is not numeric, prompt an error message: ############################################################################ Project: 'ICOS Carbon Portal' Created: Tue May 07 09:00:00 2019 Last Changed: Tue May 07 09:00:00 2019 Version: 1.0.0 Author(s): Karolina Description: Function that takes a number as input and rounds it up to the closest "20". Input parameters: Number (var_name: 'x', var_type: Integer or Float) Output: Float #Import module: #Check if input parameter is numeric: #for positive numbers, multiples of 20.0: #for positive numbers with an even number as 2nd digit: #for positive and negative numbers, whose 2nd digit #is an odd number (except for i in [-1,-9]): #for negative numbers, whose 1st or 2nd digit is an even number: #If input parameter is NOT numeric, prompt an error message: ############################################################################ ############################### round down 100 ############################# Project: 'ICOS Carbon Portal' Created: Tue May 07 09:00:00 2019 Last Changed: Tue May 07 09:00:00 2019 Version: 1.0.0 Author(s): Karolina Description: Function that takes a number as input and floors it to the nearest "100". 
Input parameters: Number (var_name: 'x', var_type: Integer or Float) Output: Float #Import module: #Check if input parameter is numeric: #If the number is an integral multiple of 100: #If the input number is NOT an integral multiple of 100: #If input parameter is not numeric, prompt an error message: ############################################################################## Project: 'ICOS Carbon Portal' Created: Tue May 07 09:00:00 2019 Last Changed: Tue May 07 09:00:00 2019 Version: 1.0.0 Author(s): Karolina Description: Function that takes a number as input and rounds it up to the nearest "100". Input parameters: Number (var_name: 'x', var_type: Integer or Float) Output: Float #Import modules: #Check if input parameter is numeric: #for integral mulitples of 100 and for the #special cases of 100, 0 and -100: #If input parameter is not numeric, prompt an error message: ############################################################################## | 3.832009 | 4 |
torch_sparse/__init__.py | mdiephuis/pytorch_sparse | 0 | 6619123 | <filename>torch_sparse/__init__.py
from .storage import SparseStorage
from .tensor import SparseTensor
from .transpose import t
from .narrow import narrow, __narrow_diag__
from .select import select
from .index_select import index_select, index_select_nnz
from .masked_select import masked_select, masked_select_nnz
from .diag import remove_diag, set_diag, fill_diag
from .add import add, add_, add_nnz, add_nnz_
from .mul import mul, mul_, mul_nnz, mul_nnz_
from .reduce import sum, mean, min, max
from .matmul import matmul
from .cat import cat, cat_diag
from .convert import to_torch_sparse, from_torch_sparse, to_scipy, from_scipy
from .coalesce import coalesce
from .transpose import transpose
from .eye import eye
from .spmm import spmm
from .spspmm import spspmm
__version__ = '0.5.0'
__all__ = [
'SparseStorage',
'SparseTensor',
't',
'narrow',
'__narrow_diag__',
'select',
'index_select',
'index_select_nnz',
'masked_select',
'masked_select_nnz',
'remove_diag',
'set_diag',
'fill_diag',
'add',
'add_',
'add_nnz',
'add_nnz_',
'mul',
'mul_',
'mul_nnz',
'mul_nnz_',
'sum',
'mean',
'min',
'max',
'matmul',
'cat',
'cat_diag',
'to_torch_sparse',
'from_torch_sparse',
'to_scipy',
'from_scipy',
'coalesce',
'transpose',
'eye',
'spmm',
'spspmm',
'__version__',
]
| <filename>torch_sparse/__init__.py
from .storage import SparseStorage
from .tensor import SparseTensor
from .transpose import t
from .narrow import narrow, __narrow_diag__
from .select import select
from .index_select import index_select, index_select_nnz
from .masked_select import masked_select, masked_select_nnz
from .diag import remove_diag, set_diag, fill_diag
from .add import add, add_, add_nnz, add_nnz_
from .mul import mul, mul_, mul_nnz, mul_nnz_
from .reduce import sum, mean, min, max
from .matmul import matmul
from .cat import cat, cat_diag
from .convert import to_torch_sparse, from_torch_sparse, to_scipy, from_scipy
from .coalesce import coalesce
from .transpose import transpose
from .eye import eye
from .spmm import spmm
from .spspmm import spspmm
__version__ = '0.5.0'
__all__ = [
'SparseStorage',
'SparseTensor',
't',
'narrow',
'__narrow_diag__',
'select',
'index_select',
'index_select_nnz',
'masked_select',
'masked_select_nnz',
'remove_diag',
'set_diag',
'fill_diag',
'add',
'add_',
'add_nnz',
'add_nnz_',
'mul',
'mul_',
'mul_nnz',
'mul_nnz_',
'sum',
'mean',
'min',
'max',
'matmul',
'cat',
'cat_diag',
'to_torch_sparse',
'from_torch_sparse',
'to_scipy',
'from_scipy',
'coalesce',
'transpose',
'eye',
'spmm',
'spspmm',
'__version__',
]
| none | 1 | 1.708163 | 2 | |
pydgeot/app/dirconfig.py | broiledmeat/pydgeot | 0 | 6619124 | import os
import json
class BaseDirConfig:
    """
    Base app configuration for a directory.

    Loads a JSON config file for a directory, recursively loading (and
    caching) parent directory configs up to the app root, then hands the
    parsed data to the subclass hook ``_parse``.
    """
    # Per-subclass cache of built configs, keyed by directory path.
    # NOTE(review): keyed only by (subclass, path) — two App instances that
    # share a path would collide; presumably one App per process. Confirm.
    _cached = {}
    """:type: dict[type, dict[str, DirConfig]]"""
    def __init__(self, app, path):
        """
        Initialize a new DirConfig instance for the given App. The `get` class method should be used instead of
        initializing this directly.

        :param app: App associated with the directory.
        :type app: pydgeot.app.App
        :param path: Directory to get configuration for.
        :type path: str
        """
        self.app = app
        self.path = path
        self._load()
    @classmethod
    def get(cls, app, path):
        """
        Get a DirConfig instance for the given file or directory path.

        :param app: App associated with the directory.
        :type app: pydgeot.app.App
        :param path: File or directory path; a file path resolves to its directory.
        :type path: str
        """
        # A file path is treated as a request for its containing directory.
        if os.path.isfile(path):
            path = os.path.dirname(path)
        if cls not in cls._cached:
            cls._cached[cls] = {}
        if path in cls._cached[cls]:
            return cls._cached[cls][path]
        config = cls(app, path)
        cls._cached[cls][path] = config
        return config
    def _load(self):
        """
        Load in the current path and parent configuration data.

        :raises AppError: if the config file exists but is not valid JSON.
        """
        from pydgeot.app import AppError
        config = {}
        # The app root uses 'pydgeot.conf'; subdirectories use the hidden
        # '.pydgeot.conf' variant.
        config_path = os.path.join(self.path, '{}pydgeot.conf'.format('' if self.path == self.app.root else '.'))
        # Find the parent config, so it can be inherited from.
        parent = None
        if self.path != self.app.root:
            parent_path = os.path.dirname(self.path)
            parent = self.__class__.get(self.app, parent_path)
        if os.path.isfile(config_path):
            try:
                with open(config_path) as fh:
                    config = json.load(fh)
            except ValueError as e:
                # Malformed JSON is surfaced as an application-level error.
                raise AppError('Could not load config \'{}\': \'{}\''.format(config_path, e))
        self._parse(config_path, config, parent)
    def _parse(self, config_path, config, parent):
        """
        Parse current path and parent configuration data retrieved from _load.
        Subclasses must override this hook.

        :type config_path: str
        :type config: dict[str, Any]
        :type parent: DirConfig | None
        """
        raise NotImplementedError
    @classmethod
    def _merge_dict(cls, target, source):
        """
        Return a merged copy of two dictionaries. Overwriting any matching keys from the second over the first, but
        merging any dictionary values.

        :param target: Original dictionary to copy and update.
        :type target: dict
        :param source: Dictionary to update items from.
        :type source: dict
        :return: Copied and updated target dictionary.
        :rtype: dict
        """
        import copy
        # Shallow copy of the top level; nested dicts are merged recursively.
        merged = copy.copy(target)
        for key in source:
            if key in merged and isinstance(merged[key], dict) and isinstance(source[key], dict):
                merged[key] = cls._merge_dict(merged[key], source[key])
                continue
            merged[key] = source[key]
        return merged
class DirConfig(BaseDirConfig):
    """
    App configuration for a directory.

    Interprets the 'processors' and 'ignore' keys of the JSON config and
    inherits unset values from the parent directory's config.
    """
    def __init__(self, app, path):
        """
        Initialize a new DirConfig instance for the given App. The `get` class method should be used instead of
        initializing this directly.

        :param app: App to associated with the directory.
        :type app: pydgeot.app.App
        :param path: Directory to get configuration for.
        :type path: str
        """
        # Processor instances active for files in this directory.
        self.processors = set()
        """:type: set[pydgeot.processors.Processor]"""
        # Globs of paths to ignore, expressed relative to the app root.
        self.ignore = set()
        """:type: set[pydgeot.filesystem.Glob]"""
        # Remaining config keys, merged with the parent's extra data.
        self.extra = {}
        """:type: dict[str, object]"""
        super().__init__(app, path)
    def _parse(self, config_path, config, parent):
        """
        Parse current path and parent configuration data retrieved from _load.

        :type config_path: str
        :type config: dict[str, Any]
        :type parent: DirConfig | None
        :raises AppError: on unknown processor names or malformed globs.
        """
        from pydgeot.app import AppError
        from pydgeot.filesystem import Glob
        # Convert a 'processors' key to a list of processor instances.
        processors = config.pop('processors', None)
        if isinstance(processors, list):
            for processor in processors:
                processor_inst = self.app.processors.get(processor, None)
                if processor_inst is not None:
                    self.processors.add(processor_inst)
                else:
                    raise AppError('Could not load config \'{}\', unable to find processor: \'{}\''.format(config_path,
                                                                                                           processor))
        elif processors is None and parent is not None:
            # NOTE(review): this aliases the parent's set rather than copying
            # it — mutating one config's set would affect the other. Confirm.
            self.processors = parent.processors
        # Convert an 'ignore' key to a list of matchable globs.
        ignore = config.pop('ignore', None)
        if isinstance(ignore, list):
            for glob in ignore:
                # Globs in subdirectories are rewritten relative to the root.
                if self.path not in (self.app.root, self.app.source_root):
                    glob = self.app.relative_path(self.path).replace('\\', '/') + '/' + glob
                try:
                    self.ignore.add(Glob(glob))
                except ValueError:
                    raise AppError('Malformed glob in \'{}\': \'{}\''.format(config_path, glob))
        elif ignore is None and parent is not None:
            # NOTE(review): aliases the parent's set, same caveat as above.
            self.ignore = parent.ignore
        # Any extra keys remain as a dictionary, being merged in with the parent configs extra data.
        self.extra = config
        if parent is not None:
            self.extra = self.__class__._merge_dict(parent.extra, self.extra)
| import os
import json
class BaseDirConfig:
    """
    Base app configuration for a directory.

    Handles locating, loading, caching, and inheriting per-directory JSON
    config files; subclasses interpret the data by overriding _parse.
    """
    # Per-subclass cache of already-built configs, keyed by directory path.
    _cached = {}
    """:type: dict[type, dict[str, DirConfig]]"""

    def __init__(self, app, path):
        """
        Initialize a new DirConfig instance for the given App. The `get` class method should be used instead of
        initializing this directly.

        :param app: App associated with the directory.
        :type app: pydgeot.app.App
        :param path: Directory to get configuration for.
        :type path: str
        """
        self.app = app
        self.path = path
        self._load()

    @classmethod
    def get(cls, app, path):
        """
        Get a DirConfig instance for the given file or directory path.

        :param app: App associated with the directory.
        :type app: pydgeot.app.App
        :param path: File or directory path to get configuration for.
        :type path: str
        :return: Cached or newly loaded config for the directory.
        :rtype: BaseDirConfig
        """
        if os.path.isfile(path):
            # Files use their containing directory's config.
            path = os.path.dirname(path)
        # Cache is keyed by concrete subclass so different config types for
        # the same path don't collide.
        if cls not in cls._cached:
            cls._cached[cls] = {}
        if path in cls._cached[cls]:
            return cls._cached[cls][path]
        config = cls(app, path)
        cls._cached[cls][path] = config
        return config

    def _load(self):
        """
        Load in the current path and parent configuration data.

        Reads a JSON config file from this directory ('pydgeot.conf' at the
        app root, '.pydgeot.conf' elsewhere), resolves the parent directory's
        config, and hands both to _parse.

        :raises AppError: If the config file exists but is not valid JSON.
        """
        from pydgeot.app import AppError
        config = {}
        config_path = os.path.join(self.path, '{}pydgeot.conf'.format('' if self.path == self.app.root else '.'))
        # Find the parent config, so it can be inherited from.
        parent = None
        if self.path != self.app.root:
            parent_path = os.path.dirname(self.path)
            # Recurses up toward the app root; results are memoized by get().
            parent = self.__class__.get(self.app, parent_path)
        if os.path.isfile(config_path):
            try:
                with open(config_path) as fh:
                    config = json.load(fh)
            except ValueError as e:
                raise AppError('Could not load config \'{}\': \'{}\''.format(config_path, e))
        self._parse(config_path, config, parent)

    def _parse(self, config_path, config, parent):
        """
        Parse current path and parent configuration data retrieved from _load.

        Subclasses must override this to interpret the raw config dictionary.

        :param config_path: Path of the config file the data was loaded from.
        :type config_path: str
        :param config: Raw configuration data (empty dict if no file existed).
        :type config: dict[str, Any]
        :param parent: Parent directory's config to inherit from, if any.
        :type parent: DirConfig | None
        """
        raise NotImplementedError

    @classmethod
    def _merge_dict(cls, target, source):
        """
        Return a merged copy of two dictionaries. Overwriting any matching keys from the second over the first, but
        merging any dictionary values.

        :param target: Original dictionary to copy and update.
        :type target: dict
        :param source: Dictionary to update items from.
        :type source: dict
        :return: Copied and updated target dictionary.
        :rtype: dict
        """
        import copy
        # Shallow copy: non-dict values are shared with the inputs.
        merged = copy.copy(target)
        for key in source:
            if key in merged and isinstance(merged[key], dict) and isinstance(source[key], dict):
                # Both sides hold dicts: merge recursively instead of replacing.
                merged[key] = cls._merge_dict(merged[key], source[key])
                continue
            merged[key] = source[key]
        return merged
class DirConfig(BaseDirConfig):
    """
    App configuration for a directory.

    Recognizes two special keys, 'processors' and 'ignore'; all remaining
    keys are kept as extra data and merged with the parent directory's extras.
    """
    def __init__(self, app, path):
        """
        Initialize a new DirConfig instance for the given App. The `get` class method should be used instead of
        initializing this directly.

        :param app: App associated with the directory.
        :type app: pydgeot.app.App
        :param path: Directory to get configuration for.
        :type path: str
        """
        # Attributes must exist before super().__init__ runs: the base
        # constructor calls _load -> _parse, which populates them.
        self.processors = set()
        """:type: set[pydgeot.processors.Processor]"""
        self.ignore = set()
        """:type: set[pydgeot.filesystem.Glob]"""
        self.extra = {}
        """:type: dict[str, object]"""
        super().__init__(app, path)

    def _parse(self, config_path, config, parent):
        """
        Parse current path and parent configuration data retrieved from _load.

        :param config_path: Path of the config file the data was loaded from.
        :type config_path: str
        :param config: Raw configuration data.
        :type config: dict[str, Any]
        :param parent: Parent directory's config to inherit from, if any.
        :type parent: DirConfig | None
        :raises AppError: If a named processor is unknown or a glob is malformed.
        """
        from pydgeot.app import AppError
        from pydgeot.filesystem import Glob
        # Convert a 'processors' key to a list of processor instances.
        processors = config.pop('processors', None)
        if isinstance(processors, list):
            for processor in processors:
                processor_inst = self.app.processors.get(processor, None)
                if processor_inst is not None:
                    self.processors.add(processor_inst)
                else:
                    raise AppError('Could not load config \'{}\', unable to find processor: \'{}\''.format(config_path,
                                                                                                           processor))
        elif processors is None and parent is not None:
            # No local setting: inherit the parent's processor set (shared by
            # reference, not copied).
            self.processors = parent.processors
        # Convert an 'ignore' key to a list of matchable globs.
        ignore = config.pop('ignore', None)
        if isinstance(ignore, list):
            for glob in ignore:
                if self.path not in (self.app.root, self.app.source_root):
                    # Prefix with this directory's app-relative path so the
                    # glob only matches files below this directory.
                    glob = self.app.relative_path(self.path).replace('\\', '/') + '/' + glob
                try:
                    self.ignore.add(Glob(glob))
                except ValueError:
                    raise AppError('Malformed glob in \'{}\': \'{}\''.format(config_path, glob))
        elif ignore is None and parent is not None:
            self.ignore = parent.ignore
        # Any extra keys remain as a dictionary, being merged in with the parent configs extra data.
        self.extra = config
        if parent is not None:
            self.extra = self.__class__._merge_dict(parent.extra, self.extra)
| en | 0.658736 | Base app configuration for a directory. :type: dict[type, dict[str, DirConfig]] Initialize a new DirConfig instance for the given App. The `get` class method should be used instead of initializing this directly. :param app: App associated with the directory. :type app: pydgeot.app.App :param path: Directory to get configuration for. :type path: str Get a DirConfig instance for the given file or directory path. :param app: App associated with the directory. :type app: pydgeot.app.App :param path: :type path: str Load in the current path and parent configuration data. # Find the parent config, so it can be inherited from. Parse current path and parent configuration data retrieved from _load. :type config_path: str :type config: dict[str, Any] :type parent: DirConfig | None Return a merged copy of two dictionaries. Overwriting any matching keys from the second over the first, but merging any dictionary values. :param target: Original dictionary to copy and update. :type target: dict :param source: Dictionary to update items from. :type source: dict :return: Copied and updated target dictionary. :rtype: dict App configuration for a directory. Initialize a new DirConfig instance for the given App. The `get` class method should be used instead of initializing this directly. :param app: App to associated with the directory. :type app: pydgeot.app.App :param path: :type path: str :type: set[pydgeot.processors.Processor] :type: set[pydgeot.filesystem.Glob] :type: dict[str, object] Parse current path and parent configuration data retrieved from _load. :type config_path: str :type config: dict[str, Any] :type parent: DirConfig | None # Convert a 'processors' key to a list of processor instances. # Convert an 'ignore' key to a list of matchable globs. # Any extra keys remain as a dictionary, being merged in with the parent configs extra data. | 2.739241 | 3 |
Coding Problem 3.3.10.py | opalqnka/CS1301xII | 0 | 6619125 | mystery_int = 5
#You may modify the lines of code above, but don't move them!
#When you Submit your code, we'll change these lines to
#assign different values to the variables.
#This is a tough one! Stick with it, you can do it!
#
#Write a program that will print the times table for the
#value given by mystery_int. The times table should print a
#two-column table of the products of every combination of
#two numbers from 1 through mystery_int. Separate consecutive
#numbers with either spaces or tabs, whichever you prefer.
#
#For example, if mystery_int is 5, this could print:
#
#1 2 3 4 5
#2 4 6 8 10
#3 6 9 12 15
#4 8 12 16 20
#5 10 15 20 25
#
#To do this, you'll want to use two nested for loops; the
#first one will print rows, and the second will print columns
#within each row.
#
#Hint: How can you print the numbers across the row without
#starting a new line each time? With what you know now, you
#could build the string for the row, but only print it once
#you've finished the row. There are other ways, but that's
#how to do it using only what we've covered so far.
#
#Hint 2: To insert a tab into a string, use the character
#sequence "\t". For example, "1\t2" will print as "1 2".
#
#Hint 3: Need to just start a new line without printing
#anything else? Just call print() with no arguments in the
#parentheses.
#
#Hint 4: If you're stuck, try first just printing out all
#the products in one flat list, each on its own line. Once
#that's working, then worry about how to organize it into
#a table.
#Add your code here!
row = ""
for i in range(1, mystery_int + 1):
for j in range(1, mystery_int + 1):
row += str(i*j) + " "
print(row)
row = ""
mystery_int = 5
# You may modify the lines of code above, but don't move them!
# When submitted, mystery_int may be assigned a different value.

# Print the multiplication table for 1..mystery_int: row i holds the
# products i*1, i*2, ..., i*mystery_int, separated by spaces. For
# mystery_int = 5 the output is:
#
# 1 2 3 4 5
# 2 4 6 8 10
# 3 6 9 12 15
# 4 8 12 16 20
# 5 10 15 20 25

# Print each product followed by a space (end=" "), then finish the row
# with a bare print() -- output matches the original, trailing space and all.
for i in range(1, mystery_int + 1):
    for j in range(1, mystery_int + 1):
        print(i * j, end=" ")
    print()
| en | 0.881791 | #You may modify the lines of code above, but don't move them! #When you Submit your code, we'll change these lines to #assign different values to the variables. #This is a tough one! Stick with it, you can do it! # #Write a program that will print the times table for the #value given by mystery_int. The times table should print a #two-column table of the products of every combination of #two numbers from 1 through mystery_int. Separate consecutive #numbers with either spaces or tabs, whichever you prefer. # #For example, if mystery_int is 5, this could print: # #1 2 3 4 5 #2 4 6 8 10 #3 6 9 12 15 #4 8 12 16 20 #5 10 15 20 25 # #To do this, you'll want to use two nested for loops; the #first one will print rows, and the second will print columns #within each row. # #Hint: How can you print the numbers across the row without #starting a new line each time? With what you know now, you #could build the string for the row, but only print it once #you've finished the row. There are other ways, but that's #how to do it using only what we've covered so far. # #Hint 2: To insert a tab into a string, use the character #sequence "\t". For example, "1\t2" will print as "1 2". # #Hint 3: Need to just start a new line without printing #anything else? Just call print() with no arguments in the #parentheses. # #Hint 4: If you're stuck, try first just printing out all #the products in one flat list, each on its own line. Once #that's working, then worry about how to organize it into #a table. #Add your code here! | 4.649763 | 5 |
skbeam/core/tests/test_mask.py | mrakitin/scikit-beam | 71 | 6619126 | # ######################################################################
# Copyright (c) 2014, Brookhaven Science Associates, Brookhaven #
# National Laboratory. All rights reserved. #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions #
# are met: #
# #
# * Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# #
# * Redistributions in binary form must reproduce the above copyright #
# notice this list of conditions and the following disclaimer in #
# the documentation and/or other materials provided with the #
# distribution. #
# #
# * Neither the name of the Brookhaven Science Associates, Brookhaven #
# National Laboratory nor the names of its contributors may be used #
# to endorse or promote products derived from this software without #
# specific prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS #
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS #
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE #
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, #
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES #
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR #
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) #
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, #
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OTHERWISE) ARISING #
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
########################################################################
from __future__ import absolute_import, division, print_function
import logging
import numpy as np
from numpy.testing import assert_array_equal
import skbeam.core.mask as mask
logger = logging.getLogger(__name__)
def test_threshold_mask():
    """mask.threshold should mask every pixel that ever exceeds the cutoff."""
    xdim = 10
    ydim = 10
    stack_size = 10
    img_stack = np.random.randint(1, 3, (stack_size, xdim, ydim))
    # Plant outliers at/above the 75 cutoff in several different frames.
    img_stack[0][0, 1] = 100
    img_stack[0][9, 1] = 98
    img_stack[6][8, 8] = 75
    img_stack[7][6, 6] = 80
    th = mask.threshold(img_stack, 75)
    # Exhaust the generator; 'final' keeps the mask yielded for the last
    # frame, which should accumulate bad pixels found in all earlier frames.
    for final in th:
        pass
    # Expected mask: ones everywhere except the four planted outliers.
    y = np.ones_like(img_stack[0])
    y[0, 1] = 0
    y[9, 1] = 0
    y[8, 8] = 0
    y[6, 6] = 0
    assert_array_equal(final, y)
def test_bad_to_nan_gen():
    """Frames listed as bad should come back all-NaN; others untouched."""
    stack = np.random.randint(1, 3, (5, 2, 2))
    bad_frames = [1, 3]
    # Drain the generator into a single array for easy indexing.
    processed = np.asarray(list(mask.bad_to_nan_gen(stack, bad_frames)))
    assert np.isnan(processed[1]).all()
    assert np.isnan(processed[3]).all()
    assert not np.isnan(processed[4]).all()
def test_margin():
    """mask.margin should mask a 1-pixel border and keep the interior."""
    shape = (10, 10)
    width = 1
    computed = mask.margin(shape, width)
    # Expected: True in the interior, False along every edge.
    expected = np.ones(shape, dtype=bool)
    expected[:width, :] = False
    expected[-width:, :] = False
    expected[:, :width] = False
    expected[:, -width:] = False
    assert_array_equal(computed, expected)
def test_ring_blur_mask():
    """binned_outlier should flag planted hot/dead pixels without over-masking."""
    from skbeam.core import recip
    # Perkin detector geometry, used only to build a radial coordinate array.
    g = recip.geo.Geometry(
        detector='Perkin', pixel1=.0002, pixel2=.0002,
        dist=.23,
        poni1=.209, poni2=.207,
        # rot1=.0128, rot2=-.015, rot3=-5.2e-8,
        wavelength=1.43e-11
    )
    r = g.rArray((2048, 2048))
    # make some sample data
    Z = 100 * np.cos(50 * r) ** 2 + 150
    # Seeded so the planted pixel positions/values are reproducible.
    np.random.seed(10)
    pixels = []
    for i in range(0, 100):
        a, b = np.random.randint(low=0, high=2048), \
               np.random.randint(low=0, high=2048)
        if np.random.random() > .5:
            # Add some hot pixels
            Z[a, b] = np.random.randint(low=200, high=255)
        else:
            # and dead pixels
            Z[a, b] = np.random.randint(low=0, high=10)
        pixels.append((a, b))
    pixel_size = [getattr(g, k) for k in ['pixel1', 'pixel2']]
    # Radial bin width: one pixel diagonal.
    rres = np.hypot(*pixel_size)
    bins = np.arange(np.min(r) - rres/2., np.max(r) + rres / 2., rres)
    msk = mask.binned_outlier(Z, r, (3., 3), bins, mask=None)
    # Compare the set of masked coordinates against the planted bad pixels.
    a = set(zip(*np.nonzero(~msk)))
    b = set(pixels)
    a_not_in_b = a - b
    b_not_in_a = b - a
    # We have not over masked 10% of the number of bad pixels
    assert len(a_not_in_b) / len(b) < .1
    # Make certain that we have masked over 90% of the bad pixels
    assert len(b_not_in_a) / len(b) < .1
| # ######################################################################
# Copyright (c) 2014, Brookhaven Science Associates, Brookhaven #
# National Laboratory. All rights reserved. #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions #
# are met: #
# #
# * Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# #
# * Redistributions in binary form must reproduce the above copyright #
# notice this list of conditions and the following disclaimer in #
# the documentation and/or other materials provided with the #
# distribution. #
# #
# * Neither the name of the Brookhaven Science Associates, Brookhaven #
# National Laboratory nor the names of its contributors may be used #
# to endorse or promote products derived from this software without #
# specific prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS #
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS #
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE #
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, #
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES #
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR #
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) #
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, #
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OTHERWISE) ARISING #
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
########################################################################
from __future__ import absolute_import, division, print_function
import logging
import numpy as np
from numpy.testing import assert_array_equal
import skbeam.core.mask as mask
logger = logging.getLogger(__name__)
def test_threshold_mask():
    """mask.threshold should mask every pixel that ever exceeds the cutoff."""
    xdim = 10
    ydim = 10
    stack_size = 10
    img_stack = np.random.randint(1, 3, (stack_size, xdim, ydim))
    # Plant outliers at/above the 75 cutoff in several different frames.
    img_stack[0][0, 1] = 100
    img_stack[0][9, 1] = 98
    img_stack[6][8, 8] = 75
    img_stack[7][6, 6] = 80
    th = mask.threshold(img_stack, 75)
    # Exhaust the generator; 'final' keeps the mask yielded for the last
    # frame, which should accumulate bad pixels found in all earlier frames.
    for final in th:
        pass
    # Expected mask: ones everywhere except the four planted outliers.
    y = np.ones_like(img_stack[0])
    y[0, 1] = 0
    y[9, 1] = 0
    y[8, 8] = 0
    y[6, 6] = 0
    assert_array_equal(final, y)
def test_bad_to_nan_gen():
    """Frames listed as bad should come back all-NaN; others untouched."""
    stack = np.random.randint(1, 3, (5, 2, 2))
    bad_frames = [1, 3]
    # Drain the generator into a single array for easy indexing.
    processed = np.asarray(list(mask.bad_to_nan_gen(stack, bad_frames)))
    assert np.isnan(processed[1]).all()
    assert np.isnan(processed[3]).all()
    assert not np.isnan(processed[4]).all()
def test_margin():
    """mask.margin should mask a 1-pixel border and keep the interior."""
    shape = (10, 10)
    width = 1
    computed = mask.margin(shape, width)
    # Expected: True in the interior, False along every edge.
    expected = np.ones(shape, dtype=bool)
    expected[:width, :] = False
    expected[-width:, :] = False
    expected[:, :width] = False
    expected[:, -width:] = False
    assert_array_equal(computed, expected)
def test_ring_blur_mask():
    """binned_outlier should flag planted hot/dead pixels without over-masking."""
    from skbeam.core import recip
    # Perkin detector geometry, used only to build a radial coordinate array.
    g = recip.geo.Geometry(
        detector='Perkin', pixel1=.0002, pixel2=.0002,
        dist=.23,
        poni1=.209, poni2=.207,
        # rot1=.0128, rot2=-.015, rot3=-5.2e-8,
        wavelength=1.43e-11
    )
    r = g.rArray((2048, 2048))
    # make some sample data
    Z = 100 * np.cos(50 * r) ** 2 + 150
    # Seeded so the planted pixel positions/values are reproducible.
    np.random.seed(10)
    pixels = []
    for i in range(0, 100):
        a, b = np.random.randint(low=0, high=2048), \
               np.random.randint(low=0, high=2048)
        if np.random.random() > .5:
            # Add some hot pixels
            Z[a, b] = np.random.randint(low=200, high=255)
        else:
            # and dead pixels
            Z[a, b] = np.random.randint(low=0, high=10)
        pixels.append((a, b))
    pixel_size = [getattr(g, k) for k in ['pixel1', 'pixel2']]
    # Radial bin width: one pixel diagonal.
    rres = np.hypot(*pixel_size)
    bins = np.arange(np.min(r) - rres/2., np.max(r) + rres / 2., rres)
    msk = mask.binned_outlier(Z, r, (3., 3), bins, mask=None)
    # Compare the set of masked coordinates against the planted bad pixels.
    a = set(zip(*np.nonzero(~msk)))
    b = set(pixels)
    a_not_in_b = a - b
    b_not_in_a = b - a
    # We have not over masked 10% of the number of bad pixels
    assert len(a_not_in_b) / len(b) < .1
    # Make certain that we have masked over 90% of the bad pixels
    assert len(b_not_in_a) / len(b) < .1
| en | 0.662556 | # ###################################################################### # Copyright (c) 2014, Brookhaven Science Associates, Brookhaven # # National Laboratory. All rights reserved. # # # # Redistribution and use in source and binary forms, with or without # # modification, are permitted provided that the following conditions # # are met: # # # # * Redistributions of source code must retain the above copyright # # notice, this list of conditions and the following disclaimer. # # # # * Redistributions in binary form must reproduce the above copyright # # notice this list of conditions and the following disclaimer in # # the documentation and/or other materials provided with the # # distribution. # # # # * Neither the name of the Brookhaven Science Associates, Brookhaven # # National Laboratory nor the names of its contributors may be used # # to endorse or promote products derived from this software without # # specific prior written permission. # # # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, # # INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) # # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, # # STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OTHERWISE) ARISING # # IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # # POSSIBILITY OF SUCH DAMAGE. 
# ######################################################################## # rot1=.0128, rot2=-.015, rot3=-5.2e-8, # make some sample data # Add some hot pixels # and dead pixels # We have not over masked 10% of the number of bad pixels # Make certain that we have masked over 90% of the bad pixels | 0.834305 | 1 |
userinput/utils/clear.py | LucaCappelletti94/userinput | 1 | 6619127 | import os
from IPython.display import clear_output
def clear():
if os.name == "posix":
os.system('clear')
else:
os.system('cls')
clear_output(wait=True) | import os
import os

from IPython.display import clear_output


def clear():
    """Clear the terminal screen and any Jupyter cell output.

    Uses 'clear' on POSIX systems and 'cls' otherwise (Windows), then
    clears the IPython display output (a no-op outside notebooks).
    """
    if os.name == "posix":
        os.system('clear')
    else:
        os.system('cls')
    # Fix: restored the missing 'import os' (the module reads os.name and
    # calls os.system) and stripped extraction junk fused onto the last line.
    clear_output(wait=True)
FutuAlgo/advanced/utilities.py | johncky/NiuNiuAlgo | 0 | 6619128 | <gh_stars>0
import asyncio
import random

import pandas as pd

from algo import Algo
class Backtest(Algo):
    """Event-driven backtester that replays stored historical data through an Algo.

    Historical bars are downloaded from the Hook's MySQL DB, fed to the
    strategy in timestamp order, and orders are simulated with a fixed
    fractional bid/ask spread. Daily portfolio value is logged for report().
    """

    def __init__(self, name: str, bars_no: int, benchmark: str = 'HSI'):
        """
        :param name: Strategy name; also used for the report output file.
        :param bars_no: Number of cached bars handed to the strategy on each
            candlestick event.
        :param benchmark: Benchmark ticker for the performance report.
        """
        super().__init__(name=name, benchmark=benchmark)
        self.bars_no = bars_no
        # Simulation state; populated by initialize() / backtest().
        self._spread = 0
        self._order_queue = None
        self._cur_candlestick_datetime = None

    def determine_trigger(self, datatype, ticker, df):
        """Decide whether to trigger the strategy and what data to pass it.

        Candlestick datatypes ('K_*') receive the last `bars_no` cached rows;
        all other datatypes receive the incoming row unchanged. Always triggers.
        """
        if 'K_' in datatype:
            return True, (
                datatype, ticker, self.get_data(datatype=datatype, ticker=ticker, n_rows=self.bars_no))
        else:
            return True, (datatype, ticker, df)

    def initialize(self, initial_capital: float,
                   hook_ip: str,
                   trading_universe: list, datatypes: list,
                   txn_cost: float = 30, cache_rows: int = 3000,
                   test_mq_con=False, spread: float = 0.2 / 100, **kwargs):
        """Set up the backtest account, data cache and order queue.

        Delegates to Algo.initialize with a 'BACKTEST' environment and no
        message-queue address.

        :param spread: Fractional bid/ask spread applied against every fill.
        """
        super().initialize(initial_capital=initial_capital, mq_ip='', hook_ip=hook_ip,
                           trading_environment='BACKTEST',
                           trading_universe=trading_universe,
                           txn_cost=txn_cost, cache_rows=cache_rows,
                           test_mq_con=test_mq_con, spread=spread,
                           datatypes=datatypes)
        self._spread = spread
        self._order_queue = list()

    def backtest(self, start_date, end_date):
        """Replay historical data between start_date and end_date.

        Downloads each (ticker, datatype) series from the Hook, seeds the
        cache with the first `bars_no` rows, then iterates the remaining rows
        in chronological order, dispatching queued order events before each
        bar and logging performance at each date change.
        """
        if not self._initialized:
            self._logger.debug('Algo not initialized')
            return
        self._logger.debug(f'Backtesting Starts...')
        # No need to load ticker cache
        succeed, failed = self._data.download_ticker_lot_size(tickers=self._data.universe)
        for ticker in succeed:
            self._account.add_new_position(ticker)
        self._logger.debug(f'Loading Date from MySQL DB...')
        backtest_df = pd.DataFrame()
        for tk in self._data.universe:
            for dtype in self._data.datatypes:
                ret_code, df = self._data.download_historical(ticker=tk, datatype=dtype, start_date=start_date,
                                                              end_date=end_date)
                if ret_code != 1 or df.shape[0] == 0:
                    msg = f'Failed to download data {dtype} {tk} from Hook, please ensure data is in MySQL Db'
                    self._logger.error(msg)
                    raise Exception(msg)
                else:
                    df['datatype'] = dtype
                    # TODO: different bars_no for different datatype
                    # First `bars_no` rows warm up the cache; the rest are replayed.
                    filler = df.iloc[:self.bars_no]
                    df = df.iloc[self.bars_no:]
                    if df.shape[0] > 0:
                        backtest_df = backtest_df.append(df)
                        self._data.add_cache(datatype=dtype, df=filler, ticker=tk)
                        self._logger.debug(
                            f'Backtesting {tk} from {df["datetime"].iloc[0]}')
                    else:
                        self._logger.warn(f'Not Enough bars to backtest {dtype}.{tk}')
                        continue
        # Replay strictly in timestamp order across all tickers/datatypes.
        backtest_df = backtest_df.sort_values(by=['datetime', 'datatype', 'ticker'], ascending=True)
        self._logger.debug(f'Loaded Data, backtesting starts...')

        async def _backtest():
            self._order_queue = list()
            # For progress bar
            last_percent = 0
            self._account.log(overwrite_date=backtest_df.iloc[0]['datetime'].date())
            for i in range(backtest_df.shape[0]):
                cur_df = backtest_df.iloc[i:i + 1]
                datatype = cur_df['datatype'].iloc[-1]
                ticker = cur_df['ticker'].iloc[-1]
                self._cur_candlestick_datetime = cur_df['datetime'].iloc[-1]
                # trigger orderUpdate first
                if len(self._order_queue) != 0:
                    tmp_order_queue = list()
                    for order_no in range(len(self._order_queue)):
                        action_type, data = self._order_queue.pop()
                        if action_type == 'UPDATE':
                            await self.on_order_update(order_id=data['order_id'].iloc[-1], df=data)
                        elif action_type == 'EXECUTE':
                            # Next-open orders fill only on the matching
                            # ticker/datatype bar; others are re-queued.
                            if datatype == data['datatype'] and ticker == data['ticker']:
                                self.trade(ticker=data['ticker'], trade_side=data['trade_side'],
                                           order_type='MARKET', quantity=data['quantity'],
                                           price=cur_df['open'].iloc[-1])
                            else:
                                tmp_order_queue.append((action_type, data))
                    self._order_queue = self._order_queue + tmp_order_queue
                # Progress Bar
                cur_percent = int(i / backtest_df.shape[0] * 100)
                if cur_percent != last_percent:
                    print(f'Progress: |{cur_percent * "#"}{(100 - cur_percent) * " "}| {i}/{backtest_df.shape[0]}')
                    last_percent = cur_percent
                # log performance once per date change
                if i > 0:
                    last_date = backtest_df.iloc[i - 1:i]['datetime'].iloc[0].date()
                    if self._cur_candlestick_datetime.date() != last_date:
                        self._account.log(overwrite_date=last_date)
                self._account.update_prices(datatype=datatype, df=cur_df)
                self._data.add_cache(datatype=datatype, df=cur_df, ticker=ticker)
                trigger_strat, (tgr_dtype, tgr_ticker, tgr_df) = self.determine_trigger(datatype=datatype,
                                                                                        ticker=ticker, df=cur_df)
                if trigger_strat:
                    await self.trigger_strat(datatype=tgr_dtype, ticker=tgr_ticker, df=tgr_df)
            self._logger.debug('Backtesting Completed! Call report() method to see backtesting result!')

        asyncio.run(_backtest())

    def trade(self, ticker, trade_side, order_type, quantity, price):
        """Simulate an immediate fill at `price` adjusted by the spread.

        Runs the account's pre-trade risk check first; failed orders are
        queued as FAILED order updates instead of raising.

        :return: (1, msg) on success, (0, msg) on risk-check failure.
        """
        lot_size = self.get_lot_size(ticker=ticker)
        risk_passed, msg = self._account.pre_trade_check(ticker=ticker, quantity=quantity, trade_side=trade_side,
                                                         price=price, lot_size=lot_size)
        if not risk_passed:
            self._logger.warn(
                f'Risk check for order "{trade_side} {quantity} qty of {ticker} @ {price}" did not pass, reasons: {msg}')
            backtest_trade = {'order_id': hash(random.random()), 'ticker': ticker, 'price': 0,
                              'trd_side': trade_side, 'order_status': 'FAILED',
                              'dealt_avg_price': 0,
                              'dealt_qty': 0, 'created_time': 0, 'last_err_msg': f'Risk check failed: {msg}'}
            order_update_df = pd.DataFrame(backtest_trade, index=[0])
            self._order_queue.append(('UPDATE', order_update_df))
            return 0, f'Risk check failed: {msg}'

        # Buys fill above, sells below the reference price by the spread.
        spread_ajust_sign = 1 if 'BUY' in trade_side else -1
        order_datetime = self._cur_candlestick_datetime
        spread_adjusted_price = price * (1 + (spread_ajust_sign * self._spread))
        backtest_trade = {'order_id': hash(random.random()), 'ticker': ticker, 'price': price,
                          'trd_side': trade_side, 'order_status': 'FILLED_ALL',
                          'dealt_avg_price': spread_adjusted_price,
                          'dealt_qty': quantity, 'created_time': order_datetime}
        order_update_df = pd.DataFrame(backtest_trade, index=[0])
        self._account.update_positions(df=order_update_df)
        self._order_queue.append(('UPDATE', order_update_df))
        return 1, f'Placed order: {order_type} {quantity} qty of {ticker} @ {price}'

    def buy_market(self, ticker, quantity):
        # Buy at current close
        return self.trade(ticker=ticker, quantity=quantity, trade_side='BUY', order_type='MARKET',
                          price=self.get_latest_price(ticker))

    def sell_market(self, ticker, quantity):
        # Sell at current close
        return self.trade(ticker=ticker, quantity=quantity, trade_side='SELL', order_type='MARKET',
                          price=self.get_latest_price(ticker))

    def buy_limit(self, ticker, quantity, price):
        # BUGFIX: previously ignored `price` and filled at the latest close;
        # a limit order should fill at the requested limit price.
        return self.trade(ticker=ticker, quantity=quantity, trade_side='BUY', order_type='NORMAL',
                          price=price)

    def sell_limit(self, ticker, quantity, price):
        # BUGFIX: previously ignored `price`; fill at the requested limit price.
        return self.trade(ticker=ticker, quantity=quantity, trade_side='SELL', order_type='NORMAL',
                          price=price)

    def buy_next_open(self, datatype, ticker, quantity):
        # Buy at next open of that datatype (executed by the backtest loop).
        self._order_queue.append(
            ('EXECUTE', {'datatype': datatype, 'ticker': ticker, 'quantity': quantity, 'trade_side': 'BUY'}))
        return 1, f'Buy {quantity} {ticker} @ next {datatype} open'

    def sell_next_open(self, datatype, ticker, quantity):
        # Sell at next open of that datatype (executed by the backtest loop).
        self._order_queue.append(
            ('EXECUTE', {'datatype': datatype, 'ticker': ticker, 'quantity': quantity, 'trade_side': 'SELL'}))
        return 1, f'Sell {quantity} {ticker} @ next {datatype} open'

    # ------------------------------------------------ [ Report ] ------------------------------------------
    def plot_ticker_trades(self, datatype, ticker):
        """Plot the ticker's closes with entry (green) / exit (red) markers."""
        completed_orders = self._account.completed_orders
        orders_df = completed_orders.loc[completed_orders.ticker == ticker].rename(
            columns={'created_time': 'datetime'})
        ticker_df = self._data.cache[datatype][ticker]
        ticker_df = ticker_df.merge(orders_df[['datetime', 'trd_side']], how='left', on=['datetime'])
        ticker_df = ticker_df.fillna(0)
        # None (not 0) for non-trade rows so the scatter skips them.
        ticker_df['buy_pt'] = [1 if 'BUY' in str(x) else None for x in ticker_df['trd_side']]
        ticker_df['sell_pt'] = [1 if 'SELL' in str(x) else None for x in ticker_df['trd_side']]
        ticker_df['buy_y'] = ticker_df['buy_pt'] * ticker_df['close']
        ticker_df['sell_y'] = ticker_df['sell_pt'] * ticker_df['close']
        ticker_df['x'] = ticker_df['datetime']
        from matplotlib import pyplot as plt
        plt.scatter(x=ticker_df['x'], y=ticker_df['buy_y'].values, marker='o', color='green', s=100)
        plt.scatter(x=ticker_df['x'], y=ticker_df['sell_y'].values, marker='o', color='red', s=100)
        plt.ylabel(f'{ticker} price')
        plt.plot(ticker_df['x'], ticker_df['close'])
        plt.title(f'{ticker} entry-exit points')

    def report(self, benchmark):
        """Generate and open an HTML quantstats tear sheet of daily PV."""
        import quantstats as qs
        import webbrowser
        PV = self._account.records['PV']
        PV.index = pd.to_datetime(PV.index)
        PV.index.name = 'datetime'
        # Resample to daily, forward-filling non-trading days.
        PV = PV.resample('1D').last().fillna(method='ffill')
        html = f'{self.name}.html'
        qs.reports.html(PV, benchmark, output=html, title=f'{self.name}')
        webbrowser.open(html)
| from algo import Algo
class Backtest(Algo):
def __init__(self, name: str, bars_no: int, benchmark: str = 'HSI'):
super().__init__(name=name, benchmark=benchmark)
self.bars_no = bars_no
self._spread = 0
self._order_queue = None
self._cur_candlestick_datetime = None
def determine_trigger(self, datatype, ticker, df):
if 'K_' in datatype:
return True, (
datatype, ticker, self.get_data(datatype=datatype, ticker=ticker, n_rows=self.bars_no))
else:
return True, (datatype, ticker, df)
def initialize(self, initial_capital: float,
hook_ip: str,
trading_universe: list, datatypes: list,
txn_cost: float = 30, cache_rows: int = 3000,
test_mq_con=False, spread: float = 0.2 / 100, **kwargs):
super().initialize(initial_capital=initial_capital, mq_ip='', hook_ip=hook_ip,
trading_environment='BACKTEST',
trading_universe=trading_universe,
txn_cost=txn_cost, cache_rows=cache_rows,
test_mq_con=test_mq_con, spread=spread,
datatypes=datatypes)
self._spread = spread
self._order_queue = list()
def backtest(self, start_date, end_date):
if not self._initialized:
self._logger.debug('Algo not initialized')
return
self._logger.debug(f'Backtesting Starts...')
# No need to load ticker cache
succeed, failed = self._data.download_ticker_lot_size(tickers=self._data.universe)
for ticker in succeed:
self._account.add_new_position(ticker)
self._logger.debug(f'Loading Date from MySQL DB...')
backtest_df = pd.DataFrame()
for tk in self._data.universe:
for dtype in self._data.datatypes:
ret_code, df = self._data.download_historical(ticker=tk, datatype=dtype, start_date=start_date,
end_date=end_date)
if ret_code != 1 or df.shape[0] == 0:
msg = f'Failed to download data {dtype} {tk} from Hook, please ensure data is in MySQL Db'
self._logger.error(msg)
raise Exception(msg)
else:
df['datatype'] = dtype
# TODO: different bars_no for different datatype
filler = df.iloc[:self.bars_no]
df = df.iloc[self.bars_no:]
if df.shape[0] > 0:
backtest_df = backtest_df.append(df)
self._data.add_cache(datatype=dtype, df=filler, ticker=tk)
self._logger.debug(
f'Backtesting {tk} from {df["datetime"].iloc[0]}')
else:
self._logger.warn(f'Not Enough bars to backtest {dtype}.{tk}')
continue
backtest_df = backtest_df.sort_values(by=['datetime', 'datatype', 'ticker'], ascending=True)
self._logger.debug(f'Loaded Data, backtesting starts...')
async def _backtest():
self._order_queue = list()
# For progress bar
last_percent = 0
self._account.log(overwrite_date=backtest_df.iloc[0]['datetime'].date())
for i in range(backtest_df.shape[0]):
cur_df = backtest_df.iloc[i:i + 1]
datatype = cur_df['datatype'].iloc[-1]
ticker = cur_df['ticker'].iloc[-1]
self._cur_candlestick_datetime = cur_df['datetime'].iloc[-1]
# trigger orderUpdate first
if len(self._order_queue) != 0:
tmp_order_queue = list()
for order_no in range(len(self._order_queue)):
action_type, data = self._order_queue.pop()
if action_type == 'UPDATE':
await self.on_order_update(order_id=data['order_id'].iloc[-1], df=data)
elif action_type == 'EXECUTE':
if datatype == data['datatype'] and ticker == data['ticker']:
self.trade(ticker=data['ticker'], trade_side=data['trade_side'],
order_type='MARKET', quantity=data['quantity'],
price=cur_df['open'].iloc[-1])
else:
tmp_order_queue.append((action_type, data))
self._order_queue = self._order_queue + tmp_order_queue
# Progress Bar
cur_percent = int(i / backtest_df.shape[0] * 100)
if cur_percent != last_percent:
print(f'Progress: |{cur_percent * "#"}{(100 - cur_percent) * " "}| {i}/{backtest_df.shape[0]}')
last_percent = cur_percent
# log performance
if i > 0:
last_date = backtest_df.iloc[i - 1:i]['datetime'].iloc[0].date()
if self._cur_candlestick_datetime.date() != last_date:
self._account.log(overwrite_date=last_date)
self._account.update_prices(datatype=datatype, df=cur_df)
self._data.add_cache(datatype=datatype, df=cur_df, ticker=ticker)
trigger_strat, (tgr_dtype, tgr_ticker, tgr_df) = self.determine_trigger(datatype=datatype,
ticker=ticker, df=cur_df)
if trigger_strat:
await self.trigger_strat(datatype=tgr_dtype, ticker=tgr_ticker, df=tgr_df)
self._logger.debug('Backtesting Completed! Call report() method to see backtesting result!')
asyncio.run(_backtest())
def trade(self, ticker, trade_side, order_type, quantity, price):
lot_size = self.get_lot_size(ticker=ticker)
risk_passed, msg = self._account.pre_trade_check(ticker=ticker, quantity=quantity, trade_side=trade_side,
price=price, lot_size=lot_size)
if not risk_passed:
self._logger.warn(
f'Risk check for order "{trade_side} {quantity} qty of {ticker} @ {price}" did not pass, reasons: {msg}')
backtest_trade = {'order_id': hash(random.random()), 'ticker': ticker, 'price': 0,
'trd_side': trade_side, 'order_status': 'FAILED',
'dealt_avg_price': 0,
'dealt_qty': 0, 'created_time': 0, 'last_err_msg': f'Risk check failed: {msg}'}
order_update_df = pd.DataFrame(backtest_trade, index=[0])
self._order_queue.append(('UPDATE', order_update_df))
return 0, f'Risk check failed: {msg}'
spread_ajust_sign = 1 if 'BUY' in trade_side else -1
order_datetime = self._cur_candlestick_datetime
spread_adjusted_price = price * (1 + (spread_ajust_sign * self._spread))
backtest_trade = {'order_id': hash(random.random()), 'ticker': ticker, 'price': price,
'trd_side': trade_side, 'order_status': 'FILLED_ALL',
'dealt_avg_price': spread_adjusted_price,
'dealt_qty': quantity, 'created_time': order_datetime}
order_update_df = pd.DataFrame(backtest_trade, index=[0])
self._account.update_positions(df=order_update_df)
self._order_queue.append(('UPDATE', order_update_df))
return 1, f'Placed order: {order_type} {quantity} qty of {ticker} @ {price}'
def buy_market(self, ticker, quantity):
# Buy at current close
return self.trade(ticker=ticker, quantity=quantity, trade_side='BUY', order_type='MARKET',
price=self.get_latest_price(ticker))
def sell_market(self, ticker, quantity):
# Sell at current close
return self.trade(ticker=ticker, quantity=quantity, trade_side='SELL', order_type='MARKET',
price=self.get_latest_price(ticker))
def buy_limit(self, ticker, quantity, price):
# Buy at current close
return self.trade(ticker=ticker, quantity=quantity, trade_side='BUY', order_type='NORMAL',
price=self.get_latest_price(ticker))
def sell_limit(self, ticker, quantity, price):
# Sell at current close
return self.trade(ticker=ticker, quantity=quantity, trade_side='SELL', order_type='NORMAL',
price=self.get_latest_price(ticker))
def buy_next_open(self, datatype, ticker, quantity):
# Buy at next open of that datatype
self._order_queue.append(
('EXECUTE', {'datatype': datatype, 'ticker': ticker, 'quantity': quantity, 'trade_side': 'BUY'}))
return 1, f'Buy {quantity} {ticker} @ next {datatype} open'
def sell_next_open(self, datatype, ticker, quantity):
# Sell at next open of that datatype
self._order_queue.append(
('EXECUTE', {'datatype': datatype, 'ticker': ticker, 'quantity': quantity, 'trade_side': 'SELL'}))
return 1, f'Sell {quantity} {ticker} @ next {datatype} open'
# ------------------------------------------------ [ Report ] ------------------------------------------
def plot_ticker_trades(self, datatype, ticker):
completed_orders = self._account.completed_orders
orders_df = completed_orders.loc[completed_orders.ticker == ticker].rename(
columns={'created_time': 'datetime'})
ticker_df = self._data.cache[datatype][ticker]
ticker_df = ticker_df.merge(orders_df[['datetime', 'trd_side']], how='left', on=['datetime'])
ticker_df = ticker_df.fillna(0)
ticker_df['buy_pt'] = [1 if 'BUY' in str(x) else None for x in ticker_df['trd_side']]
ticker_df['sell_pt'] = [1 if 'SELL' in str(x) else None for x in ticker_df['trd_side']]
ticker_df['buy_y'] = ticker_df['buy_pt'] * ticker_df['close']
ticker_df['sell_y'] = ticker_df['sell_pt'] * ticker_df['close']
ticker_df['x'] = ticker_df['datetime']
from matplotlib import pyplot as plt
plt.scatter(x=ticker_df['x'], y=ticker_df['buy_y'].values, marker='o', color='green', s=100)
plt.scatter(x=ticker_df['x'], y=ticker_df['sell_y'].values, marker='o', color='red', s=100)
plt.ylabel(f'{ticker} price')
plt.plot(ticker_df['x'], ticker_df['close'])
plt.title(f'{ticker} entry-exit points')
def report(self, benchmark):
import quantstats as qs
import webbrowser
PV = self._account.records['PV']
PV.index = pd.to_datetime(PV.index)
PV.index.name = 'datetime'
PV = PV.resample('1D').last().fillna(method='ffill')
html = f'{self.name}.html'
qs.reports.html(PV, benchmark, output=html, title=f'{self.name}')
webbrowser.open(html) | en | 0.616076 | # No need to load ticker cache # TODO: different bars_no for different datatype # For progress bar # trigger orderUpdate first # Progress Bar # log performance # Buy at current close # Sell at current close # Buy at current close # Sell at current close # Buy at next open of that datatype # Sell at next open of that datatype # ------------------------------------------------ [ Report ] ------------------------------------------ | 2.446223 | 2 |
avisys/avisys/custom_scripts/jinja_file/jinja_file.py | DeepakPawar1/avisys | 0 | 6619129 | <filename>avisys/avisys/custom_scripts/jinja_file/jinja_file.py
from __future__ import unicode_literals
from frappe.model.document import Document
import frappe
from frappe.utils import flt,today
from frappe import _
import decimal
import json
from datetime import datetime, timedelta
@frappe.whitelist()
def get_employee_transfer_data(doc):
date = datetime.strptime(str(doc.transfer_date), '%Y-%m-%d')- timedelta(days=1)
data={"designation" :[None,None],"department":[None,None]}
data['date']=date.strftime("%d %b, %Y")
for i in doc.transfer_details:
if i.property == "Designation":
data['designation'] = [i.current if i.current else "", i.new if i.new else ""]
if i.property == "Department":
data['department'] = [i.current if i.current else "", i.new if i.new else ""]
return data
@frappe.whitelist()
def get_table_data_for_promotion(doc):
data = frappe.db.sql("""select name,from_date,salary_structure from `tabSalary Structure Assignment` where employee = '%s' group by from_date desc """%(doc.employee))
send_data = {}
total_fixed_pay_monthly =0
total_fixed_pay_annually=0
total_fixed_deduction_annually=0
prev_sal_struct=0
items = ['basic_salary','house_rent_allowance','personal_allowance','conveyance_allowance','basic_salary','provident_fund','professional_tax']
for i in items:
send_data[i]=[0,0]
send_data['total_fixed_deduction_annually'] = 0
send_data['total_fixed_pay_monthly'] = 0
send_data['total_fixed_pay_annually'] = 0
send_data['gross'] = 0
send_data['prev_sal_struct']=0
if len(data) >= 2 :
doc_salary_structure = frappe.get_doc("Salary Structure",data[0][2])
for i in doc_salary_structure.earnings:
if i.salary_component == "Basic Salary":
send_data['basic_salary']=[i.amount,12 * i.amount]
total_fixed_pay_monthly += i.amount
total_fixed_pay_annually += 12 * i.amount
if i.salary_component == "House Rent Allowance":
send_data['house_rent_allowance']=[i.amount,12 * i.amount]
total_fixed_pay_monthly += i.amount
total_fixed_pay_annually += 12 * i.amount
if i.salary_component == "Personal Allowance":
send_data['personal_allowance']=[i.amount,12 * i.amount]
total_fixed_pay_monthly += i.amount
total_fixed_pay_annually += 12 * i.amount
if i.salary_component == "Conveyance Allowance/Expenses":
send_data['conveyance_allowance']=[i.amount,12 * i.amount]
total_fixed_pay_monthly += i.amount
total_fixed_pay_annually += 12 * i.amount
for i in doc_salary_structure.deductions:
if i.salary_component == "Provident Fund":
send_data['provident_fund']=[i.amount,12 * i.amount]
total_fixed_deduction_annually += 12 * i.amount
if i.salary_component == "Professional Tax":
send_data['professional_tax']=[i.amount,12 * i.amount]
total_fixed_deduction_annually += 12 * i.amount
send_data['total_fixed_deduction_annually'] = total_fixed_deduction_annually
send_data['total_fixed_pay_monthly'] = total_fixed_pay_monthly
send_data['total_fixed_pay_annually'] = total_fixed_pay_annually
send_data['gross'] = send_data['total_fixed_pay_annually']-send_data['total_fixed_deduction_annually']
doc_salary_structure_two = frappe.get_doc("Salary Structure",data[1][2])
for i in doc_salary_structure_two.earnings:
prev_sal_struct += i.amount
send_data['prev_sal_struct']=prev_sal_struct
#for i in doc_salary_structure_two.deductions:
#prev_sal_struct -= i.amount
return send_data
@frappe.whitelist()
def get_job_offer_data(doc):
class Dict2Class(object):
def __init__(self, my_dict):
for key in my_dict:
setattr(self, key, my_dict[key])
return get_tableA_data(Dict2Class(doc))
@frappe.whitelist()
def get_tableA_data(doc):
data = frappe.db.sql("""select name,from_date,salary_structure from `tabSalary Structure Assignment` where employee = '%s' group by from_date desc """%(doc.employee))
send_data = {}
total_fixed_pay_monthly =0
total_fixed_pay_annually=0
total_fixed_deduction_annually=0
items = ['basic_salary','house_rent_allowance','personal_allowance','conveyance_allowance','basic_salary','provident_fund','professional_tax']
for i in items:
send_data[i]=[0,0]
send_data['total_fixed_deduction_annually'] = 0
send_data['total_fixed_pay_monthly'] = 0
send_data['total_fixed_pay_annually'] = 0
send_data['gross'] = 0
if data:
doc_salary_structure = frappe.get_doc("Salary Structure",data[0][2])
for i in doc_salary_structure.earnings:
if i.salary_component == "Basic Salary":
send_data['basic_salary']=[i.amount,12 * i.amount]
total_fixed_pay_monthly += i.amount
total_fixed_pay_annually += 12 * i.amount
if i.salary_component == "House Rent Allowance":
send_data['house_rent_allowance']=[i.amount,12 * i.amount]
total_fixed_pay_monthly += i.amount
total_fixed_pay_annually += 12 * i.amount
if i.salary_component == "Personal Allowance":
send_data['personal_allowance']=[i.amount,12 * i.amount]
total_fixed_pay_monthly += i.amount
total_fixed_pay_annually += 12 * i.amount
if i.salary_component == "Conveyance Allowance/Expenses":
send_data['conveyance_allowance']=[i.amount,12 * i.amount]
total_fixed_pay_monthly += i.amount
total_fixed_pay_annually += 12 * i.amount
for i in doc_salary_structure.deductions:
if i.salary_component == "Provident Fund":
send_data['provident_fund']=[i.amount,12 * i.amount]
total_fixed_deduction_annually += 12 * i.amount
if i.salary_component == "Professional Tax":
send_data['professional_tax']=[i.amount,12 * i.amount]
total_fixed_deduction_annually += 12 * i.amount
send_data['total_fixed_deduction_annually'] = total_fixed_deduction_annually
send_data['total_fixed_pay_monthly'] = total_fixed_pay_monthly
send_data['total_fixed_pay_annually'] = total_fixed_pay_annually
send_data['gross'] = send_data['total_fixed_pay_annually']-send_data['total_fixed_deduction_annually']
data_bahrain = frappe.db.sql("""select ssa.name,ssa.from_date,ssa.salary_structure from `tabSalary Structure Assignment` ssa inner join `tabSalary Structure` ss on ssa.salary_structure = ss.name and ssa.employee = '%s' and ss.for_reference_purpose = 1 group by ssa.from_date desc """%(doc.employee))
total_earnings = 0
total_deduction = 0
basic_salary_bh = 0
send_bahrain_data={}
bahrain_items = ['basic_salary','house_rent_allowance','conveyance_allowance','client_engagement_allowance','siogosi','advance']
for j in bahrain_items:
send_bahrain_data[j]=0
send_bahrain_data['total_deduction']=0
send_bahrain_data['total_earnings']=0
send_bahrain_data['gross']=0
if data_bahrain:
doc_salary_structure = frappe.get_doc("Salary Structure",data_bahrain[0][2])
for i in doc_salary_structure.earnings:
if i.salary_component == "Basic Salary":
basic_salary_bh = i.amount
send_bahrain_data['basic_salary']=i.amount
total_earnings += i.amount
if i.salary_component == "House Rent Allowance":
send_bahrain_data['house_rent_allowance']=i.amount
total_earnings += i.amount
if i.salary_component == "Client Engagement Allowance":
send_bahrain_data['client_engagement_allowance']=i.amount
total_earnings += i.amount
if i.salary_component == "Conveyance Allowance/Expenses":
send_bahrain_data['conveyance_allowance']=i.amount
total_earnings += i.amount
for i in doc_salary_structure.deductions:
if i.salary_component == "SIO/GOSI":
send_bahrain_data['siogosi']=i.amount
total_deduction += basic_salary_bh * (i.amount/100)
if i.salary_component == "Advance":
send_bahrain_data['advance']=i.amount
total_deduction += i.amount
send_bahrain_data['total_deduction']=total_deduction
send_bahrain_data['total_earnings']=total_earnings
send_bahrain_data['gross']=send_bahrain_data['total_earnings']- send_bahrain_data['total_deduction']
send_data['bahrain_data'] = send_bahrain_data
return send_data
| <filename>avisys/avisys/custom_scripts/jinja_file/jinja_file.py
from __future__ import unicode_literals
from frappe.model.document import Document
import frappe
from frappe.utils import flt,today
from frappe import _
import decimal
import json
from datetime import datetime, timedelta
@frappe.whitelist()
def get_employee_transfer_data(doc):
date = datetime.strptime(str(doc.transfer_date), '%Y-%m-%d')- timedelta(days=1)
data={"designation" :[None,None],"department":[None,None]}
data['date']=date.strftime("%d %b, %Y")
for i in doc.transfer_details:
if i.property == "Designation":
data['designation'] = [i.current if i.current else "", i.new if i.new else ""]
if i.property == "Department":
data['department'] = [i.current if i.current else "", i.new if i.new else ""]
return data
@frappe.whitelist()
def get_table_data_for_promotion(doc):
data = frappe.db.sql("""select name,from_date,salary_structure from `tabSalary Structure Assignment` where employee = '%s' group by from_date desc """%(doc.employee))
send_data = {}
total_fixed_pay_monthly =0
total_fixed_pay_annually=0
total_fixed_deduction_annually=0
prev_sal_struct=0
items = ['basic_salary','house_rent_allowance','personal_allowance','conveyance_allowance','basic_salary','provident_fund','professional_tax']
for i in items:
send_data[i]=[0,0]
send_data['total_fixed_deduction_annually'] = 0
send_data['total_fixed_pay_monthly'] = 0
send_data['total_fixed_pay_annually'] = 0
send_data['gross'] = 0
send_data['prev_sal_struct']=0
if len(data) >= 2 :
doc_salary_structure = frappe.get_doc("Salary Structure",data[0][2])
for i in doc_salary_structure.earnings:
if i.salary_component == "Basic Salary":
send_data['basic_salary']=[i.amount,12 * i.amount]
total_fixed_pay_monthly += i.amount
total_fixed_pay_annually += 12 * i.amount
if i.salary_component == "House Rent Allowance":
send_data['house_rent_allowance']=[i.amount,12 * i.amount]
total_fixed_pay_monthly += i.amount
total_fixed_pay_annually += 12 * i.amount
if i.salary_component == "Personal Allowance":
send_data['personal_allowance']=[i.amount,12 * i.amount]
total_fixed_pay_monthly += i.amount
total_fixed_pay_annually += 12 * i.amount
if i.salary_component == "Conveyance Allowance/Expenses":
send_data['conveyance_allowance']=[i.amount,12 * i.amount]
total_fixed_pay_monthly += i.amount
total_fixed_pay_annually += 12 * i.amount
for i in doc_salary_structure.deductions:
if i.salary_component == "Provident Fund":
send_data['provident_fund']=[i.amount,12 * i.amount]
total_fixed_deduction_annually += 12 * i.amount
if i.salary_component == "Professional Tax":
send_data['professional_tax']=[i.amount,12 * i.amount]
total_fixed_deduction_annually += 12 * i.amount
send_data['total_fixed_deduction_annually'] = total_fixed_deduction_annually
send_data['total_fixed_pay_monthly'] = total_fixed_pay_monthly
send_data['total_fixed_pay_annually'] = total_fixed_pay_annually
send_data['gross'] = send_data['total_fixed_pay_annually']-send_data['total_fixed_deduction_annually']
doc_salary_structure_two = frappe.get_doc("Salary Structure",data[1][2])
for i in doc_salary_structure_two.earnings:
prev_sal_struct += i.amount
send_data['prev_sal_struct']=prev_sal_struct
#for i in doc_salary_structure_two.deductions:
#prev_sal_struct -= i.amount
return send_data
@frappe.whitelist()
def get_job_offer_data(doc):
class Dict2Class(object):
def __init__(self, my_dict):
for key in my_dict:
setattr(self, key, my_dict[key])
return get_tableA_data(Dict2Class(doc))
@frappe.whitelist()
def get_tableA_data(doc):
data = frappe.db.sql("""select name,from_date,salary_structure from `tabSalary Structure Assignment` where employee = '%s' group by from_date desc """%(doc.employee))
send_data = {}
total_fixed_pay_monthly =0
total_fixed_pay_annually=0
total_fixed_deduction_annually=0
items = ['basic_salary','house_rent_allowance','personal_allowance','conveyance_allowance','basic_salary','provident_fund','professional_tax']
for i in items:
send_data[i]=[0,0]
send_data['total_fixed_deduction_annually'] = 0
send_data['total_fixed_pay_monthly'] = 0
send_data['total_fixed_pay_annually'] = 0
send_data['gross'] = 0
if data:
doc_salary_structure = frappe.get_doc("Salary Structure",data[0][2])
for i in doc_salary_structure.earnings:
if i.salary_component == "Basic Salary":
send_data['basic_salary']=[i.amount,12 * i.amount]
total_fixed_pay_monthly += i.amount
total_fixed_pay_annually += 12 * i.amount
if i.salary_component == "House Rent Allowance":
send_data['house_rent_allowance']=[i.amount,12 * i.amount]
total_fixed_pay_monthly += i.amount
total_fixed_pay_annually += 12 * i.amount
if i.salary_component == "Personal Allowance":
send_data['personal_allowance']=[i.amount,12 * i.amount]
total_fixed_pay_monthly += i.amount
total_fixed_pay_annually += 12 * i.amount
if i.salary_component == "Conveyance Allowance/Expenses":
send_data['conveyance_allowance']=[i.amount,12 * i.amount]
total_fixed_pay_monthly += i.amount
total_fixed_pay_annually += 12 * i.amount
for i in doc_salary_structure.deductions:
if i.salary_component == "Provident Fund":
send_data['provident_fund']=[i.amount,12 * i.amount]
total_fixed_deduction_annually += 12 * i.amount
if i.salary_component == "Professional Tax":
send_data['professional_tax']=[i.amount,12 * i.amount]
total_fixed_deduction_annually += 12 * i.amount
send_data['total_fixed_deduction_annually'] = total_fixed_deduction_annually
send_data['total_fixed_pay_monthly'] = total_fixed_pay_monthly
send_data['total_fixed_pay_annually'] = total_fixed_pay_annually
send_data['gross'] = send_data['total_fixed_pay_annually']-send_data['total_fixed_deduction_annually']
data_bahrain = frappe.db.sql("""select ssa.name,ssa.from_date,ssa.salary_structure from `tabSalary Structure Assignment` ssa inner join `tabSalary Structure` ss on ssa.salary_structure = ss.name and ssa.employee = '%s' and ss.for_reference_purpose = 1 group by ssa.from_date desc """%(doc.employee))
total_earnings = 0
total_deduction = 0
basic_salary_bh = 0
send_bahrain_data={}
bahrain_items = ['basic_salary','house_rent_allowance','conveyance_allowance','client_engagement_allowance','siogosi','advance']
for j in bahrain_items:
send_bahrain_data[j]=0
send_bahrain_data['total_deduction']=0
send_bahrain_data['total_earnings']=0
send_bahrain_data['gross']=0
if data_bahrain:
doc_salary_structure = frappe.get_doc("Salary Structure",data_bahrain[0][2])
for i in doc_salary_structure.earnings:
if i.salary_component == "Basic Salary":
basic_salary_bh = i.amount
send_bahrain_data['basic_salary']=i.amount
total_earnings += i.amount
if i.salary_component == "House Rent Allowance":
send_bahrain_data['house_rent_allowance']=i.amount
total_earnings += i.amount
if i.salary_component == "Client Engagement Allowance":
send_bahrain_data['client_engagement_allowance']=i.amount
total_earnings += i.amount
if i.salary_component == "Conveyance Allowance/Expenses":
send_bahrain_data['conveyance_allowance']=i.amount
total_earnings += i.amount
for i in doc_salary_structure.deductions:
if i.salary_component == "SIO/GOSI":
send_bahrain_data['siogosi']=i.amount
total_deduction += basic_salary_bh * (i.amount/100)
if i.salary_component == "Advance":
send_bahrain_data['advance']=i.amount
total_deduction += i.amount
send_bahrain_data['total_deduction']=total_deduction
send_bahrain_data['total_earnings']=total_earnings
send_bahrain_data['gross']=send_bahrain_data['total_earnings']- send_bahrain_data['total_deduction']
send_data['bahrain_data'] = send_bahrain_data
return send_data
| en | 0.690899 | select name,from_date,salary_structure from `tabSalary Structure Assignment` where employee = '%s' group by from_date desc #for i in doc_salary_structure_two.deductions: #prev_sal_struct -= i.amount select name,from_date,salary_structure from `tabSalary Structure Assignment` where employee = '%s' group by from_date desc select ssa.name,ssa.from_date,ssa.salary_structure from `tabSalary Structure Assignment` ssa inner join `tabSalary Structure` ss on ssa.salary_structure = ss.name and ssa.employee = '%s' and ss.for_reference_purpose = 1 group by ssa.from_date desc | 1.994418 | 2 |
logfx.py | dylanleigh/dl-misc-scripts | 0 | 6619130 | #!/usr/bin/env python
#
# Logfx.py - Sound Effects for your logs
#
# Monitors a number of files for new lines; when a line is appended
# matching a specified regular expression, play a specified sound file.
#
# Also accepts expressions with a countdown timer (in seconds)
# embedded (e.g. "You have to wait 47 seconds for the next attempt".
# When this option is in effect the sound will be played after
# a delay of that many seconds.
#
# Requires pygame to play actual sounds (uses the mixer module to play audio).
# Otherwise it will fall back to sending beeps to the terminal for
# each event.
#
# Some assorted interesting, fun and mundane uses:
#
# - Immediate audio alert of all sorts of serious problems from
# syslog or dmesg.
#
# - Use with a very short & soft click sound to create a "gieger counter"
# effect; you can set this on a log file with a very widely matching regex
# (even ".") to tell when you are getting a lot of messages. Particularly fun
# with mail or web server logs.
#
# - Monitor dmesg or syslog for new wifi networks or transient errors.
#
# - MMORPGs (and other games) where you might switch away to another app while
# you are mining/travelling/doing something else which will take a few
# minutes. You can set sound effects to alert you noisily if you are being
# attacked or you can't do any more mining.
#
# Config file format is just a CSV file with each line of the form:
#
# <file-to-watch>,<regex>,<file-to-play>[,<options>]
#
# Options:
# "noecho" - Don't print the line to the terminal
# "delay" - requires that the regex include ([:digit:]) or similar to
# match the number of seconds to delay the sound effect (see example below).
#
# e.g.:
# /var/log/messages,device no longer idle,/home/me/soundfx/ping.wav
# /var/log/messages,Found new beacon,/home/me/soundfx/ping.wav,noecho
# /var/log/messages,Wait for ([0-9]+) sec,/home/me/soundfx/ping.wav,delay
#
# -----
#
# Copyright (c) 2012 <NAME>.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
#
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
#
# -----
#
# TODO:
# - Option to play a sound effect on any exceptions which stop
# the script?
import os,sys,csv,time,re
try:
import pygame
except (ImportError):
print "Pygame not found, will use terminal beep instead"
pygame = False
################################################# globals
debug=False
logFiles = dict() # logfilepath -> WatchFile object
if pygame:
soundFiles = dict() # soundfilepath -> pygame sound object
delayQueue = list() # tuple (time_t to trigger, soundfile index)
################################################# classes
class WatchFile (object):
'''Encapsulates everything about a individual file that is watched
for changes - file name, file object, list of regexes and the
name of the sound file to play if there is a match.'''
def __init__(self, filename):
self.filename = filename
self.fobj = open(filename)
self.fobj.seek(0,os.SEEK_END) # skip to end of file
self.matchList = list() # new empty list of matches
if (debug):
print ('Watching file %s'%(filename,))
def addMatch(self, regex, soundfile, options):
'''Add a match to the list for this file. Pass the regex in text
form; it is stored compiled'''
tup = (re.compile(regex), soundfile, options)
self.matchList.append(tup)
if (debug):
print ('Adding match %s to file %s, playing %s'%(regex,self.filename,soundfile))
def findMatches(self):
'''Looks for new matches, returns a list of soundfiles to play'''
ret = list()
# reset EOF condition
# Read each line, see if it matches each regex
for line in self.fobj:
for tup in self.matchList:
match = tup[0].search(line)
if (match):
# matched the regex - play effect etc
options = tup[2]
if (options):
if ("delay" in options):
# get num of seconds
sec = match.group(1)
# determine time in future to play
future = time.time() + float(sec)
# add to queue
delayQueue.append((future, tup[1]))
continue;
if ("delay" not in options):
# play straight away
if pygame: # TODO DRY
soundFiles[tup[1]].play()
else:
print ""
if ("noecho" not in options):
print "%s:%s"%(self.filename,line)
else:
# no options - default
print "%s:%s"%(self.filename,line)
if pygame: # TODO DRY
soundFiles[tup[1]].play()
else:
print ""
self.fobj.seek(0,os.SEEK_CUR) # XXX: hack to clear internal for loop buffer
# end finMAtches
# end WatchFile
################################################# main begins here
if pygame:
pygame.mixer.init()
# open config file as commandline argument or exit
if (len(sys.argv) < 2):
sys.exit("Usage: logfx.py <config-file.csv>")
configcsv = csv.reader(open(sys.argv[1], 'rb'))
### parse config file into global structures
### use dicts to prevent repeat of logfile or soundfile loading
for row in configcsv:
if (len(row) < 3):
# assert row >= 3 items
print "Row too small: %s"%row
continue;
# 3 or 4 per line
logf = row[0]
regex = row[1]
sfile = row[2]
if (len(row) > 3):
options = row[3:] # XXX: options is a List
else:
options = None
if (not logf in logFiles): # logfile not referenced before
logFiles[logf] = WatchFile(logf)
# add new regex to new or existing entry
logFiles[logf].addMatch(regex,sfile,options)
if (pygame and not (sfile in soundFiles)): # sound file not referenced before
soundFiles[sfile] = pygame.mixer.Sound(sfile)
soundFiles[sfile].set_volume(0.5) # TODO an option to set volume! 0-1.0
# end config file parsing
# main loop
while True:
time.sleep(1)
# scan for new lines
for logf in logFiles:
logFiles[logf].findMatches()
# process delay queue
for t in delayQueue:
now = time.time()
if (t[0]<=now): # time to trigger <= now
if (pygame): # TODO DRY
soundFiles[t[1]].play()
else:
print ""
delayQueue.remove(t) # remove from queue
| #!/usr/bin/env python
#
# Logfx.py - Sound Effects for your logs
#
# Monitors a number of files for new lines; when a line is appended
# matching a specified regular expression, play a specified sound file.
#
# Also accepts expressions with a countdown timer (in seconds)
# embedded (e.g. "You have to wait 47 seconds for the next attempt".
# When this option is in effect the sound will be played after
# a delay of that many seconds.
#
# Requires pygame to play actual sounds (uses the mixer module to play audio).
# Otherwise it will fall back to sending beeps to the terminal for
# each event.
#
# Some assorted interesting, fun and mundane uses:
#
# - Immediate audio alert of all sorts of serious problems from
# syslog or dmesg.
#
# - Use with a very short & soft click sound to create a "gieger counter"
# effect; you can set this on a log file with a very widely matching regex
# (even ".") to tell when you are getting a lot of messages. Particularly fun
# with mail or web server logs.
#
# - Monitor dmesg or syslog for new wifi networks or transient errors.
#
# - MMORPGs (and other games) where you might switch away to another app while
# you are mining/travelling/doing something else which will take a few
# minutes. You can set sound effects to alert you noisily if you are being
# attacked or you can't do any more mining.
#
# Config file format is just a CSV file with each line of the form:
#
# <file-to-watch>,<regex>,<file-to-play>[,<options>]
#
# Options:
# "noecho" - Don't print the line to the terminal
# "delay" - requires that the regex include ([:digit:]) or similar to
# match the number of seconds to delay the sound effect (see example below).
#
# e.g.:
# /var/log/messages,device no longer idle,/home/me/soundfx/ping.wav
# /var/log/messages,Found new beacon,/home/me/soundfx/ping.wav,noecho
# /var/log/messages,Wait for ([0-9]+) sec,/home/me/soundfx/ping.wav,delay
#
# -----
#
# Copyright (c) 2012 <NAME>.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
#
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
#
# -----
#
# TODO:
# - Option to play a sound effect on any exceptions which stop
# the script?
# Python 2 script: uses print statements throughout.
import os,sys,csv,time,re
# pygame is optional: when it is missing, the sound-playing branches fall
# back to printing an empty line instead of playing a sample.
try:
    import pygame
except (ImportError):
    print "Pygame not found, will use terminal beep instead"
    pygame = False # falsy sentinel: disables every mixer code path below
################################################# globals
debug=False
logFiles = dict() # logfilepath -> WatchFile object
if pygame:
    soundFiles = dict() # soundfilepath -> pygame sound object
delayQueue = list() # tuple (time_t to trigger, soundfile index)
################################################# classes
class WatchFile (object):
    '''Encapsulates everything about a individual file that is watched
    for changes - file name, file object, list of regexes and the
    name of the sound file to play if there is a match.'''
    def __init__(self, filename):
        # Open the log file and seek to its end so only lines appended
        # after startup are ever reported.
        self.filename = filename
        self.fobj = open(filename)
        self.fobj.seek(0,os.SEEK_END) # skip to end of file
        self.matchList = list() # new empty list of matches
        if (debug):
            print ('Watching file %s'%(filename,))
    def addMatch(self, regex, soundfile, options):
        '''Add a match to the list for this file. Pass the regex in text
        form; it is stored compiled.  options is either None or the list
        of option strings ("noecho", "delay", ...) from the config row.'''
        tup = (re.compile(regex), soundfile, options)
        self.matchList.append(tup)
        if (debug):
            print ('Adding match %s to file %s, playing %s'%(regex,self.filename,soundfile))
    def findMatches(self):
        '''Scan lines appended since the last call and trigger the
        configured effects.  NOTE(review): despite the original wording
        nothing is returned -- immediate effects are played inline and
        "delay" matches are pushed onto the global delayQueue.'''
        ret = list() # NOTE(review): never populated or returned -- dead variable
        # reset EOF condition
        # Read each line, see if it matches each regex
        for line in self.fobj:
            for tup in self.matchList:
                # tup = (compiled regex, sound file path, options list or None)
                match = tup[0].search(line)
                if (match):
                    # matched the regex - play effect etc
                    options = tup[2]
                    if (options):
                        if ("delay" in options):
                            # get num of seconds
                            # ("delay" requires the regex to capture the
                            # seconds count in group(1))
                            sec = match.group(1)
                            # determine time in future to play
                            future = time.time() + float(sec)
                            # add to queue
                            delayQueue.append((future, tup[1]))
                            continue;
                        if ("delay" not in options):
                            # play straight away
                            if pygame: # TODO DRY
                                soundFiles[tup[1]].play()
                            else:
                                print ""
                            if ("noecho" not in options):
                                print "%s:%s"%(self.filename,line)
                    else:
                        # no options - default
                        print "%s:%s"%(self.filename,line)
                        if pygame: # TODO DRY
                            soundFiles[tup[1]].play()
                        else:
                            print ""
        self.fobj.seek(0,os.SEEK_CUR) # XXX: hack to clear internal for loop buffer
    # end finMAtches
# end WatchFile
################################################# main begins here
if pygame:
    pygame.mixer.init()
# open config file as commandline argument or exit
if (len(sys.argv) < 2):
    sys.exit("Usage: logfx.py <config-file.csv>")
configcsv = csv.reader(open(sys.argv[1], 'rb'))
### parse config file into global structures
### use dicts to prevent repeat of logfile or soundfile loading
# Each CSV row: <file-to-watch>,<regex>,<file-to-play>[,<options>...]
for row in configcsv:
    if (len(row) < 3):
        # assert row >= 3 items
        print "Row too small: %s"%row
        continue;
    # 3 or 4 per line
    logf = row[0]
    regex = row[1]
    sfile = row[2]
    if (len(row) > 3):
        options = row[3:] # XXX: options is a List
    else:
        options = None
    if (not logf in logFiles): # logfile not referenced before
        logFiles[logf] = WatchFile(logf)
    # add new regex to new or existing entry
    logFiles[logf].addMatch(regex,sfile,options)
    if (pygame and not (sfile in soundFiles)): # sound file not referenced before
        soundFiles[sfile] = pygame.mixer.Sound(sfile)
        soundFiles[sfile].set_volume(0.5) # TODO an option to set volume! 0-1.0
# end config file parsing
# main loop
# Poll once per second: report fresh matches, then fire any delayed
# effects whose trigger time has passed.
while True:
    time.sleep(1)
    # scan for new lines
    for logf in logFiles:
        logFiles[logf].findMatches()
    # process delay queue
    for t in delayQueue:
        now = time.time()
        if (t[0]<=now): # time to trigger <= now
            if (pygame): # TODO DRY
                soundFiles[t[1]].play()
            else:
                print ""
            # NOTE(review): removing from delayQueue while iterating it can
            # skip the entry that follows the removed one; rebuilding the
            # list would be safer.
            delayQueue.remove(t) # remove from queue
| en | 0.750832 | #!/usr/bin/env python # # Logfx.py - Sound Effects for your logs # # Monitors a number of files for new lines; when a line is appended # matching a specified regular expression, play a specified sound file. # # Also accepts expressions with a countdown timer (in seconds) # embedded (e.g. "You have to wait 47 seconds for the next attempt". # When this option is in effect the sound will be played after # a delay of that many seconds. # # Requires pygame to play actual sounds (uses the mixer module to play audio). # Otherwise it will fall back to sending beeps to the terminal for # each event. # # Some assorted interesting, fun and mundane uses: # # - Immediate audio alert of all sorts of serious problems from # syslog or dmesg. # # - Use with a very short & soft click sound to create a "gieger counter" # effect; you can set this on a log file with a very widely matching regex # (even ".") to tell when you are getting a lot of messages. Particularly fun # with mail or web server logs. # # - Monitor dmesg or syslog for new wifi networks or transient errors. # # - MMORPGs (and other games) where you might switch away to another app while # you are mining/travelling/doing something else which will take a few # minutes. You can set sound effects to alert you noisily if you are being # attacked or you can't do any more mining. # # Config file format is just a CSV file with each line of the form: # # <file-to-watch>,<regex>,<file-to-play>[,<options>] # # Options: # "noecho" - Don't print the line to the terminal # "delay" - requires that the regex include ([:digit:]) or similar to # match the number of seconds to delay the sound effect (see example below). # # e.g.: # /var/log/messages,device no longer idle,/home/me/soundfx/ping.wav # /var/log/messages,Found new beacon,/home/me/soundfx/ping.wav,noecho # /var/log/messages,Wait for ([0-9]+) sec,/home/me/soundfx/ping.wav,delay # # ----- # # Copyright (c) 2012 <NAME>. 
# # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE, EVEN IF ADVISED # OF THE POSSIBILITY OF SUCH DAMAGE. # # ----- # # TODO: # - Option to play a sound effect on any exceptions which stop # the script? ################################################# globals # logfilepath -> WatchFile object # soundfilepath -> pygame sound object # tuple (time_t to trigger, soundfile index) ################################################# classes Encapsulates everything about a individual file that is watched for changes - file name, file object, list of regexes and the name of the sound file to play if there is a match. # skip to end of file # new empty list of matches Add a match to the list for this file. 
Pass the regex in text form; it is stored compiled Looks for new matches, returns a list of soundfiles to play # reset EOF condition # Read each line, see if it matches each regex # matched the regex - play effect etc # get num of seconds # determine time in future to play # add to queue # play straight away # TODO DRY # no options - default # TODO DRY # XXX: hack to clear internal for loop buffer # end finMAtches # end WatchFile ################################################# main begins here # open config file as commandline argument or exit ### parse config file into global structures ### use dicts to prevent repeat of logfile or soundfile loading # assert row >= 3 items # 3 or 4 per line # XXX: options is a List # logfile not referenced before # add new regex to new or existing entry # sound file not referenced before # TODO an option to set volume! 0-1.0 # end config file parsing # main loop # scan for new lines # process delay queue # time to trigger <= now # TODO DRY # remove from queue | 2.575002 | 3 |
tests/common.py | thehesiod/rate-limiter | 0 | 6619131 | import asynctest
import unittest
_none_type = type(None)


def _assertRecursiveAlmostEqual(self, first, second, places=None, msg=None, delta=None):
    """Recursively fail unless *first* and *second* are (almost) equal.

    Scalars are compared with ``assertAlmostEqual`` (difference rounded to
    *places* decimal places -- default 7 -- and compared to zero, or compared
    against *delta*), while dicts, lists and tuples are descended into
    element by element, so two nested structures compare almost equal when
    all of their leaves do.

    Raises the usual ``unittest`` failure exception on mismatch; for nested
    containers the failure is wrapped in an ``Exception`` naming the
    offending key or index.
    """
    # Mismatched types fail immediately, except that any two numbers
    # (e.g. int vs float) may still be compared numerically.
    both_numeric = (isinstance(first, (float, int, complex))
                    and isinstance(second, (float, int, complex)))
    if type(first) != type(second) and not both_numeric:
        return self.assertEqual(first, second)  # will raise mis-matched types
    if isinstance(first, (_none_type, str)):
        self.assertEqual(first, second)
    elif isinstance(first, (float, int, complex)):
        self.assertAlmostEqual(first, second, places, msg, delta)
    elif isinstance(first, dict):
        # Key sets must match exactly; values are compared recursively.
        self.assertEqual(set(first.keys()), set(second.keys()))  # will raise keys don't match
        for f_k, f_v in first.items():
            try:
                self.assertRecursiveAlmostEqual(f_v, second[f_k], places, msg, delta)
            except Exception as e:
                raise Exception("Error with key: {}".format(f_k)) from e
    elif isinstance(first, (list, tuple)):
        if len(first) != len(second):
            self.assertEqual(first, second)  # will raise lists don't have same length
        # zip() is safe here: a length mismatch has already raised above.
        for idx, (f_item, s_item) in enumerate(zip(first, second)):
            try:
                self.assertRecursiveAlmostEqual(f_item, s_item, places, msg, delta)
            except Exception as e:
                raise Exception("Error with index: {}".format(idx)) from e
    else:
        # BUG FIX: the old ``assert False`` disappears under ``python -O``;
        # self.fail() raises the same AssertionError unconditionally.
        self.fail("unsupported type: {!r}".format(type(first)))
# Monkeypatch in method: attach the recursive almost-equal assertion to
# both asynctest's and the stdlib's TestCase so every test class in the
# suite can call self.assertRecursiveAlmostEqual(...).
asynctest.TestCase.assertRecursiveAlmostEqual = _assertRecursiveAlmostEqual
unittest.TestCase.assertRecursiveAlmostEqual = _assertRecursiveAlmostEqual
| import asynctest
import unittest
_none_type = type(None)


def _assertRecursiveAlmostEqual(self, first, second, places=None, msg=None, delta=None):
    """Recursively fail unless *first* and *second* are (almost) equal.

    Scalars are compared with ``assertAlmostEqual`` (difference rounded to
    *places* decimal places -- default 7 -- and compared to zero, or compared
    against *delta*), while dicts, lists and tuples are descended into
    element by element, so two nested structures compare almost equal when
    all of their leaves do.

    Raises the usual ``unittest`` failure exception on mismatch; for nested
    containers the failure is wrapped in an ``Exception`` naming the
    offending key or index.
    """
    # Mismatched types fail immediately, except that any two numbers
    # (e.g. int vs float) may still be compared numerically.
    both_numeric = (isinstance(first, (float, int, complex))
                    and isinstance(second, (float, int, complex)))
    if type(first) != type(second) and not both_numeric:
        return self.assertEqual(first, second)  # will raise mis-matched types
    if isinstance(first, (_none_type, str)):
        self.assertEqual(first, second)
    elif isinstance(first, (float, int, complex)):
        self.assertAlmostEqual(first, second, places, msg, delta)
    elif isinstance(first, dict):
        # Key sets must match exactly; values are compared recursively.
        self.assertEqual(set(first.keys()), set(second.keys()))  # will raise keys don't match
        for f_k, f_v in first.items():
            try:
                self.assertRecursiveAlmostEqual(f_v, second[f_k], places, msg, delta)
            except Exception as e:
                raise Exception("Error with key: {}".format(f_k)) from e
    elif isinstance(first, (list, tuple)):
        if len(first) != len(second):
            self.assertEqual(first, second)  # will raise lists don't have same length
        # zip() is safe here: a length mismatch has already raised above.
        for idx, (f_item, s_item) in enumerate(zip(first, second)):
            try:
                self.assertRecursiveAlmostEqual(f_item, s_item, places, msg, delta)
            except Exception as e:
                raise Exception("Error with index: {}".format(idx)) from e
    else:
        # BUG FIX: the old ``assert False`` disappears under ``python -O``;
        # self.fail() raises the same AssertionError unconditionally.
        self.fail("unsupported type: {!r}".format(type(first)))
# Monkeypatch in method: attach the recursive almost-equal assertion to
# both asynctest's and the stdlib's TestCase so every test class in the
# suite can call self.assertRecursiveAlmostEqual(...).
asynctest.TestCase.assertRecursiveAlmostEqual = _assertRecursiveAlmostEqual
unittest.TestCase.assertRecursiveAlmostEqual = _assertRecursiveAlmostEqual
| en | 0.927755 | Fail if the two objects are unequal as determined by their difference rounded to the given number of decimal places (default 7) and comparing to zero, or by comparing that the between the two objects is more than the given delta. Note that decimal places (from zero) are usually not the same as significant digits (measured from the most signficant digit). If the two objects compare equal then they will automatically compare almost equal. # will raise mis-matched types # will raise keys don't match # will raise list don't have same length # unsupported # Monkeypatch in method | 3.377361 | 3 |
tests/test_top_level_rtseq_params.py | arnoldcsorvasi/niveristand-python | 6 | 6619132 | <filename>tests/test_top_level_rtseq_params.py
from niveristand import nivs_rt_sequence
from niveristand import NivsParam
from niveristand import realtimesequencetools
from niveristand.clientapi import ChannelReference, DoubleValue, DoubleValueArray, RealTimeSequence
from niveristand.errors import VeristandError
import pytest
@NivsParam('p', DoubleValue(1.2), NivsParam.BY_REF)
@nivs_rt_sequence
def func1(p):
    # Sequence under test: echoes the double parameter 'p' (declared
    # by-reference with default 1.2).
    # NOTE(review): nivs_rt_sequence appears to translate this body into a
    # VeriStand RT sequence -- keep the body minimal and translation-safe.
    return p.value
@NivsParam('p', DoubleValueArray([1.2, 2.3]), NivsParam.BY_VALUE)
@nivs_rt_sequence
def func2(p):
    # Sequence under test: returns the first element of the by-value
    # double-array parameter (default [1.2, 2.3]); the tests below show
    # that omitting a by-value parameter falls back to this default.
    return p[0].value
def test_run_py_as_rtseq_numeric_param():
    # An explicit DoubleValue overrides func1's declared 1.2 default.
    actual = realtimesequencetools.run_py_as_rtseq(func1, {"p": DoubleValue(2.3)})
    assert actual == 2.3
def test_run_py_as_rtseq_channel_reference_param():
    # A by-ref parameter may be bound to a gateway channel; the sequence
    # then sees the channel's current value.
    desired_rpm = ChannelReference('Aliases/DesiredRPM')
    desired_rpm.value = 100.101
    actual = realtimesequencetools.run_py_as_rtseq(func1, {"p": desired_rpm})
    assert actual == 100.101
def test_run_py_as_rtseq_invalid_extra_parameter():
    # A parameter name the sequence does not declare ("pp") is rejected.
    with pytest.raises(VeristandError):
        realtimesequencetools.run_py_as_rtseq(func1, {"p": DoubleValue(2.3), "pp": DoubleValue(3.4)})
def test_run_py_as_rtseq_missing_by_ref_parameter():
    # By-reference parameters are mandatory: omitting one is an error.
    with pytest.raises(VeristandError):
        realtimesequencetools.run_py_as_rtseq(func1, {})
def test_run_py_as_rtseq_missing_by_value_parameter():
    # By-value parameters are optional and fall back to the declared
    # default (func2's [1.2, 2.3] -> first element 1.2).
    actual = realtimesequencetools.run_py_as_rtseq(func2, {})
    assert actual == 1.2
def test_run_py_as_rtseq_wrong_parameter_data_type():
    # A scalar DoubleValue cannot be bound to func2's array parameter.
    with pytest.raises(VeristandError):
        realtimesequencetools.run_py_as_rtseq(func2, {"p": DoubleValue(2.3)})
def test_realtimesequence_numeric_param():
    # Same scenarios as the run_py_as_rtseq tests above, but the sequence
    # is first compiled/deployed via RealTimeSequence and executed
    # asynchronously: run() returns a handle whose result is awaited with
    # wait_for_result() and read from ret_val.
    rtseq = RealTimeSequence(func1)
    actual = rtseq.run({"p": DoubleValue(2.3)})
    actual.wait_for_result()
    assert actual.ret_val == 2.3
def test_realtimesequence_channel_reference_param():
    # By-ref parameter bound to a gateway channel.
    desired_rpm = ChannelReference('Aliases/DesiredRPM')
    desired_rpm.value = 100.101
    rtseq = RealTimeSequence(func1)
    actual = rtseq.run({"p": desired_rpm})
    actual.wait_for_result()
    assert actual.ret_val == 100.101
def test_realtimesequence_invalid_extra_parameter():
    # Undeclared parameter name is rejected at run() time.
    rtseq = RealTimeSequence(func1)
    with pytest.raises(VeristandError):
        rtseq.run({"p": DoubleValue(2.3), "pp": DoubleValue(3.4)})
def test_realtimesequence_missing_by_ref_parameter():
    # By-reference parameters are mandatory.
    rtseq = RealTimeSequence(func1)
    with pytest.raises(VeristandError):
        rtseq.run({})
def test_realtimesequence_missing_by_value_parameter():
    # By-value parameters fall back to the declared default (1.2).
    rtseq = RealTimeSequence(func2)
    actual = rtseq.run({})
    actual.wait_for_result()
    assert actual.ret_val == 1.2
def test_realtimesequence_wrong_parameter_data_type():
    # Scalar value bound to an array parameter is rejected.
    rtseq = RealTimeSequence(func2)
    with pytest.raises(VeristandError):
        rtseq.run({"p": DoubleValue(2.3)})
| <filename>tests/test_top_level_rtseq_params.py
from niveristand import nivs_rt_sequence
from niveristand import NivsParam
from niveristand import realtimesequencetools
from niveristand.clientapi import ChannelReference, DoubleValue, DoubleValueArray, RealTimeSequence
from niveristand.errors import VeristandError
import pytest
@NivsParam('p', DoubleValue(1.2), NivsParam.BY_REF)
@nivs_rt_sequence
def func1(p):
    # Sequence under test: echoes the double parameter 'p' (declared
    # by-reference with default 1.2).
    # NOTE(review): nivs_rt_sequence appears to translate this body into a
    # VeriStand RT sequence -- keep the body minimal and translation-safe.
    return p.value
@NivsParam('p', DoubleValueArray([1.2, 2.3]), NivsParam.BY_VALUE)
@nivs_rt_sequence
def func2(p):
    # Sequence under test: returns the first element of the by-value
    # double-array parameter (default [1.2, 2.3]); the tests below show
    # that omitting a by-value parameter falls back to this default.
    return p[0].value
def test_run_py_as_rtseq_numeric_param():
    # An explicit DoubleValue overrides func1's declared 1.2 default.
    actual = realtimesequencetools.run_py_as_rtseq(func1, {"p": DoubleValue(2.3)})
    assert actual == 2.3
def test_run_py_as_rtseq_channel_reference_param():
    # A by-ref parameter may be bound to a gateway channel; the sequence
    # then sees the channel's current value.
    desired_rpm = ChannelReference('Aliases/DesiredRPM')
    desired_rpm.value = 100.101
    actual = realtimesequencetools.run_py_as_rtseq(func1, {"p": desired_rpm})
    assert actual == 100.101
def test_run_py_as_rtseq_invalid_extra_parameter():
    # A parameter name the sequence does not declare ("pp") is rejected.
    with pytest.raises(VeristandError):
        realtimesequencetools.run_py_as_rtseq(func1, {"p": DoubleValue(2.3), "pp": DoubleValue(3.4)})
def test_run_py_as_rtseq_missing_by_ref_parameter():
    # By-reference parameters are mandatory: omitting one is an error.
    with pytest.raises(VeristandError):
        realtimesequencetools.run_py_as_rtseq(func1, {})
def test_run_py_as_rtseq_missing_by_value_parameter():
    # By-value parameters are optional and fall back to the declared
    # default (func2's [1.2, 2.3] -> first element 1.2).
    actual = realtimesequencetools.run_py_as_rtseq(func2, {})
    assert actual == 1.2
def test_run_py_as_rtseq_wrong_parameter_data_type():
    # A scalar DoubleValue cannot be bound to func2's array parameter.
    with pytest.raises(VeristandError):
        realtimesequencetools.run_py_as_rtseq(func2, {"p": DoubleValue(2.3)})
def test_realtimesequence_numeric_param():
    # Same scenarios as the run_py_as_rtseq tests above, but the sequence
    # is first compiled/deployed via RealTimeSequence and executed
    # asynchronously: run() returns a handle whose result is awaited with
    # wait_for_result() and read from ret_val.
    rtseq = RealTimeSequence(func1)
    actual = rtseq.run({"p": DoubleValue(2.3)})
    actual.wait_for_result()
    assert actual.ret_val == 2.3
def test_realtimesequence_channel_reference_param():
    # By-ref parameter bound to a gateway channel.
    desired_rpm = ChannelReference('Aliases/DesiredRPM')
    desired_rpm.value = 100.101
    rtseq = RealTimeSequence(func1)
    actual = rtseq.run({"p": desired_rpm})
    actual.wait_for_result()
    assert actual.ret_val == 100.101
def test_realtimesequence_invalid_extra_parameter():
    # Undeclared parameter name is rejected at run() time.
    rtseq = RealTimeSequence(func1)
    with pytest.raises(VeristandError):
        rtseq.run({"p": DoubleValue(2.3), "pp": DoubleValue(3.4)})
def test_realtimesequence_missing_by_ref_parameter():
    # By-reference parameters are mandatory.
    rtseq = RealTimeSequence(func1)
    with pytest.raises(VeristandError):
        rtseq.run({})
def test_realtimesequence_missing_by_value_parameter():
    # By-value parameters fall back to the declared default (1.2).
    rtseq = RealTimeSequence(func2)
    actual = rtseq.run({})
    actual.wait_for_result()
    assert actual.ret_val == 1.2
def test_realtimesequence_wrong_parameter_data_type():
    # Scalar value bound to an array parameter is rejected.
    rtseq = RealTimeSequence(func2)
    with pytest.raises(VeristandError):
        rtseq.run({"p": DoubleValue(2.3)})
| none | 1 | 2.004368 | 2 | |
contests/atcoder/abc087/arc090_b/main.py | conao3/coder | 0 | 6619133 | <gh_stars>0
#!/usr/bin/env python3
# from typing import *
# def solve(N: str, M: str, L: List[str], R: List[str], D: List[str]) -> str:
def solve(N, M, L, R, D):
    """Solve AtCoder ABC087-D / ARC090-B "People on a Line".

    N people stand on a line; each of the M constraints states that person
    R[i] stands D[i] metres to the right of person L[i] (x_R - x_L = D).
    Returns "Yes" if some assignment of coordinates satisfies every
    constraint, "No" otherwise.

    Accepts the raw string tokens produced by main() as well as plain
    ints; everything is normalised with int() internally.
    """
    n = int(N)
    # Weighted (potential) union-find over people 1..n.
    # parent[v] is v's DSU parent; diff[v] is x_v - x_parent[v], which
    # after path compression becomes x_v - x_root.
    parent = list(range(n + 1))
    diff = [0] * (n + 1)

    def find(v):
        # Iterative find with full path compression that keeps diff[]
        # consistent: offsets along the path are accumulated root-first.
        path = []
        while parent[v] != v:
            path.append(v)
            v = parent[v]
        root = v
        acc = 0
        for u in reversed(path):
            acc += diff[u]
            diff[u] = acc
            parent[u] = root
        return root

    for l, r, d in zip(L, R, D):
        l, r, d = int(l), int(r), int(d)
        root_l, root_r = find(l), find(r)
        if root_l == root_r:
            # Both already placed relative to the same root: the recorded
            # distance must agree with the new constraint.
            if diff[r] - diff[l] != d:
                return "No"
        else:
            # Attach r's component under l's root so that x_r - x_l == d.
            parent[root_r] = root_l
            diff[root_r] = diff[l] + d - diff[r]
    return "Yes"
# generated by online-judge-template-generator v4.1.0 (https://github.com/kmyk/online-judge-template-generator)
def main():
    """Read the problem input from stdin and print solve()'s verdict.

    Input format::

        N M
        L_1 R_1 D_1
        ...
        L_M R_M D_M
    """
    N, M = input().split()
    # BUG FIX: the generated template left M as a string, so range(M)
    # below raised TypeError before solve() was ever reached.
    M = int(M)
    L = [None for _ in range(M)]
    R = [None for _ in range(M)]
    D = [None for _ in range(M)]
    for i in range(M):
        L[i], R[i], D[i] = input().split()
    bpv = solve(N, M, L, R, D)
    print(bpv)


if __name__ == '__main__':
    main()
| #!/usr/bin/env python3
# from typing import *
# def solve(N: str, M: str, L: List[str], R: List[str], D: List[str]) -> str:
def solve(N, M, L, R, D):
    """Solve AtCoder ABC087-D / ARC090-B "People on a Line".

    N people stand on a line; each of the M constraints states that person
    R[i] stands D[i] metres to the right of person L[i] (x_R - x_L = D).
    Returns "Yes" if some assignment of coordinates satisfies every
    constraint, "No" otherwise.

    Accepts the raw string tokens produced by main() as well as plain
    ints; everything is normalised with int() internally.
    """
    n = int(N)
    # Weighted (potential) union-find over people 1..n.
    # parent[v] is v's DSU parent; diff[v] is x_v - x_parent[v], which
    # after path compression becomes x_v - x_root.
    parent = list(range(n + 1))
    diff = [0] * (n + 1)

    def find(v):
        # Iterative find with full path compression that keeps diff[]
        # consistent: offsets along the path are accumulated root-first.
        path = []
        while parent[v] != v:
            path.append(v)
            v = parent[v]
        root = v
        acc = 0
        for u in reversed(path):
            acc += diff[u]
            diff[u] = acc
            parent[u] = root
        return root

    for l, r, d in zip(L, R, D):
        l, r, d = int(l), int(r), int(d)
        root_l, root_r = find(l), find(r)
        if root_l == root_r:
            # Both already placed relative to the same root: the recorded
            # distance must agree with the new constraint.
            if diff[r] - diff[l] != d:
                return "No"
        else:
            # Attach r's component under l's root so that x_r - x_l == d.
            parent[root_r] = root_l
            diff[root_r] = diff[l] + d - diff[r]
    return "Yes"
# generated by online-judge-template-generator v4.1.0 (https://github.com/kmyk/online-judge-template-generator)
def main():
    """Read the problem input from stdin and print solve()'s verdict.

    Input format::

        N M
        L_1 R_1 D_1
        ...
        L_M R_M D_M
    """
    N, M = input().split()
    # BUG FIX: the generated template left M as a string, so range(M)
    # below raised TypeError before solve() was ever reached.
    M = int(M)
    L = [None for _ in range(M)]
    R = [None for _ in range(M)]
    D = [None for _ in range(M)]
    for i in range(M):
        L[i], R[i], D[i] = input().split()
    bpv = solve(N, M, L, R, D)
    print(bpv)


if __name__ == '__main__':
    main()
pkg/__init__.py | brettcannon/python-project-template | 3 | 6619134 | <gh_stars>1-10
"""Sample Python project."""
__version__ = "0"
| """Sample Python project."""
__version__ = "0" | en | 0.744657 | Sample Python project. | 1.148356 | 1 |
minigest/docfisc/admin/__init__.py | ctrlmaniac/minigest | 0 | 6619135 | <reponame>ctrlmaniac/minigest
from .chiusura_fiscale import ChiusuraFiscaleAdmin
from .distinta import DistintaAdmin, DistintaInline
from .fattura import FatturaAdmin
from .fattura_rata import FatturaRataAdmin, FatturaRataAdminInline
from .tipo_documento import TipoDocumentoAdmin
# Explicit public API of this admin subpackage: star-imports expose
# exactly the admin classes re-exported from the sibling modules above.
__all__ = [
    "FatturaRataAdmin",
    "FatturaRataAdminInline",
    "FatturaAdmin",
    "TipoDocumentoAdmin",
    "ChiusuraFiscaleAdmin",
    "DistintaAdmin",
    "DistintaInline",
]
| from .chiusura_fiscale import ChiusuraFiscaleAdmin
from .distinta import DistintaAdmin, DistintaInline
from .fattura import FatturaAdmin
from .fattura_rata import FatturaRataAdmin, FatturaRataAdminInline
from .tipo_documento import TipoDocumentoAdmin
__all__ = [
"FatturaRataAdmin",
"FatturaRataAdminInline",
"FatturaAdmin",
"TipoDocumentoAdmin",
"ChiusuraFiscaleAdmin",
"DistintaAdmin",
"DistintaInline",
] | none | 1 | 1.070949 | 1 | |
contrib/SP/setup2.py | xylar/cdat | 62 | 6619136 | <gh_stars>10-100
#!/usr/bin/env python
from distutils.core import setup, Extension
from distutils.command.install_headers import install_headers
import os, sys
from glob import glob
# --- package metadata and numeric-backend selection (Python 2 script) ---
class Dummy:
    pass
# Load Scientific/__pkginfo__.py (e.g. __version__) into a throwaway
# namespace object instead of importing the package being built.
pkginfo = Dummy()
execfile('Scientific/__pkginfo__.py', pkginfo.__dict__)
# Accumulators filled in below and finally handed to setup().
extra_compile_args = []
arrayobject_h_include = []
data_files = []
scripts = []
cmdclass = {}
options = {}
# Numeric backend: numpy is the default; --numeric/--numarray select the
# legacy array packages.  The flags are consumed (removed from sys.argv)
# so distutils never sees them.
use_numpy = True
use_numeric = False
use_numarray = False
if "--numpy" in sys.argv:
    sys.argv.remove("--numpy")
if "--numeric" in sys.argv:
    use_numeric = True
    use_numpy = False
    sys.argv.remove("--numeric")
if "--numarray" in sys.argv:
    use_numarray = True
    use_numpy = False
    sys.argv.remove("--numarray")
# Peek at --prefix= (deliberately NOT removed, so distutils still
# processes it too).
install_prefix = sys.prefix
for arg in sys.argv[1:]:
    if arg[:9] == "--prefix=":
        install_prefix = arg[9:]
# The C sources use these defines to select the array API at compile time.
if use_numeric:
    extra_compile_args.append("-DNUMERIC=1")
elif use_numarray:
    extra_compile_args.append("-DNUMARRAY=1")
else :
    extra_compile_args.append("-DNUMPY=1")
# Path to numpy's arrayobject.h, which lives in different places on
# Windows and POSIX installations.
if sys.platform == 'win32':
    arrayobject_h_include = [os.path.join(install_prefix,
                            "Lib/site-packages/numpy/core/include")]
else:
    arrayobject_h_include = [os.path.join(install_prefix,
                           "lib/python%s.%s/site-packages/numpy/core/include"
                            % sys.version_info [:2])]
# libm is implicit on Windows but must be linked explicitly elsewhere.
math_libraries = []
if sys.platform != 'win32':
    math_libraries.append('m')
#
# Locate netCDF library
#
# Search order for the netCDF installation:
#   1. --netcdf_prefix= command-line option (consumed from sys.argv)
#   2. NETCDF_PREFIX environment variable
#   3. conventional prefixes /usr/local, /usr, /sw
#   4. a CDAT/PCMDI cdat_info module, if importable (overrides the above)
netcdf_prefix = None
for arg in sys.argv[1:]:
    if arg[:16] == "--netcdf_prefix=":
        netcdf_prefix = arg[16:]
        sys.argv.remove(arg)
        break
# On Windows the directory holding netcdf.dll must be named explicitly.
if sys.platform == 'win32':
    netcdf_dll = None
    for arg in sys.argv[1:]:
        if arg[:13] == "--netcdf_dll=":
            netcdf_dll = arg[13:]
            sys.argv.remove(arg)
            break
if netcdf_prefix is None:
    try:
        netcdf_prefix=os.environ['NETCDF_PREFIX']
    except KeyError:
        pass
if netcdf_prefix is None:
    # Probe standard prefixes for include/netcdf.h.
    for netcdf_prefix in ['/usr/local', '/usr', '/sw']:
        netcdf_include = [os.path.join(netcdf_prefix, 'include'),]
        netcdf_lib = [os.path.join(netcdf_prefix, 'lib'),]
        if os.path.exists(os.path.join(netcdf_include[0], 'netcdf.h')):
            break
    else:
        netcdf_prefix = None
### PCMDI's addition for NetCDF4
netcdf_libraries = ['netcdf',]
netcdf_include = None
try:
    # When CDAT is present, its cdunif build settings take precedence.
    import cdat_info
    netcdf_include = cdat_info.cdunif_include_directories
    netcdf_lib = cdat_info.cdunif_library_directories
    netcdf_libraries = cdat_info.cdunif_libraries
    netcdf_prefix=''
    netcdf_h_file = os.path.join(netcdf_prefix, 'netcdf.h')
except:
    # NOTE(review): bare except silently swallows *any* failure while
    # importing/reading cdat_info, not just ImportError.
    pass
## END OF PCMDI adds
if netcdf_prefix is None:
    # netCDF missing: build everything except the netCDF extension.
    print "netCDF not found, the netCDF module will not be built!"
    if sys.platform != 'win32':
        print "If netCDF is installed somewhere on this computer,"
        print "please set NETCDF_PREFIX to the path where"
        print "include/netcdf.h and lib/netcdf.a are located"
        print "and re-run the build procedure."
    ext_modules = []
else:
    if sys.platform == 'win32':
        if netcdf_dll is None:
            print "Option --netcdf_dll is missing"
            raise SystemExit
        netcdf_include = [netcdf_prefix,]
        netcdf_h_file = os.path.join(netcdf_prefix, 'netcdf.h')
        netcdf_lib = [netcdf_dll,]
        # Ship netcdf.dll plus a post-install script with the installer.
        data_files.append(('DLLs', [os.path.join(netcdf_dll, 'netcdf.dll')]))
        scripts.append('scientific_win32_postinstall.py')
        options['bdist_wininst'] = {'install_script': "scientific_win32_postinstall.py"}
    else:
        ## PCMDI modified for opendap
        if netcdf_prefix != '':
            print "Using netCDF installation in ", netcdf_prefix
            netcdf_include = [os.path.join(netcdf_prefix, 'include'),]
            netcdf_h_file = os.path.join(netcdf_prefix, 'include', 'netcdf.h')
            netcdf_lib = [os.path.join(netcdf_prefix, 'lib'),]
        else:
            # Empty prefix means the cdat_info branch above supplied paths.
            print "Using netCDF installation in ", netcdf_prefix
            netcdf_h_file = os.path.join(netcdf_include[1], 'netcdf.h')
            print 'NC File:',netcdf_h_file
        ##end
    ext_modules = [Extension('Scientific_netcdf',
                             ['Src/Scientific_netcdf.c'],
                             include_dirs=['Include'] + netcdf_include + arrayobject_h_include,
                             library_dirs=netcdf_lib,
                             libraries = ['netcdf'],
                             extra_compile_args=extra_compile_args)]
try:
    # Add code for including documentation in Mac packages
    import bdist_mpkg
    from distutils.command.bdist_mpkg import bdist_mpkg as bdist_mpkg
    class my_bdist_mpkg(bdist_mpkg):
        # Extends the Mac .mpkg builder with optional Examples and Doc
        # sub-packages installed under /Developer/Python/ScientificPython.
        def initialize_options(self):
            bdist_mpkg.initialize_options(self)
            self.scheme_descriptions['examples'] = u'(Optional) ScientificPython example code'
            self.scheme_map['examples'] = '/Developer/Python/ScientificPython/Examples'
            self.scheme_copy['examples'] = 'Examples'
            self.scheme_descriptions['doc'] = u'(Optional) ScientificPython documentation'
            self.scheme_map['doc'] = '/Developer/Python/ScientificPython/Documentation'
            self.scheme_copy['doc'] = 'Doc'
    cmdclass['bdist_mpkg'] = my_bdist_mpkg
except ImportError:
    # bdist_mpkg not installed -- skip the Mac-specific packaging support.
    pass
# Pure-Python subpackages to install.
packages = ['Scientific', 'Scientific.Clustering', 'Scientific.Functions',
            'Scientific.Geometry', 'Scientific.IO',
            'Scientific.Physics', 'Scientific.QtWidgets',
            'Scientific.Statistics', 'Scientific.Signals',
            'Scientific.Threading', 'Scientific.TkWidgets',
            'Scientific.Visualization', 'Scientific.MPI',
            'Scientific.DistributedComputing']
# C extension modules (in addition to the optional netCDF one above).
ext_modules.append(Extension('Scientific_vector',
                             ['Src/Scientific_vector.c'],
                             include_dirs=['Include']+arrayobject_h_include,
                             libraries=math_libraries,
                             extra_compile_args=extra_compile_args))
ext_modules.append(Extension('Scientific_affinitypropagation',
                             ['Src/Scientific_affinitypropagation.c'],
                             include_dirs=['Include']+arrayobject_h_include,
                             libraries=math_libraries,
                             extra_compile_args=extra_compile_args))
ext_modules.append(Extension('Scientific_numerics_package_id',
                             ['Src/Scientific_numerics_package_id.c'],
                             include_dirs=['Include']+arrayobject_h_include,
                             extra_compile_args=extra_compile_args))
ext_modules.append(Extension('Scientific_interpolation',
                             ['Src/Scientific_interpolation.c'],
                             include_dirs=['Include']+arrayobject_h_include,
                             libraries=math_libraries,
                             extra_compile_args=extra_compile_args))
scripts.append('task_manager')
# The BSP subpackage needs Python >= 2.1.
# NOTE(review): the string comparison sys.version[:3] >= '2.1' misbehaves
# for two-digit minor versions (e.g. '2.10'); kept as-is.
if sys.version[:3] >= '2.1':
    packages.append('Scientific.BSP')
    scripts.append('bsp_virtual')
class modified_install_headers(install_headers):
    # Redirect header installation into .../include/Scientific instead of
    # the default per-package directory.
    def finalize_options(self):
        install_headers.finalize_options(self)
        self.install_dir = \
            os.path.join(os.path.split(self.install_dir)[0], 'Scientific')
cmdclass['install_headers'] = modified_install_headers
# Install the public C headers (plus netcdf.h when netCDF was found).
headers = glob(os.path.join ("Include","Scientific","*.h"))
if netcdf_prefix is not None:
    headers.append(netcdf_h_file)
# Drive the distutils build with everything assembled above.  Compiled
# extensions land in the platform-specific subpackage
# Scientific.<sys.platform> (ext_package).
setup (name = "ScientificPython",
       version = pkginfo.__version__,
       description = "Various Python modules for scientific computing",
       long_description =
"""ScientificPython is a collection of Python modules that are useful
for scientific computing. In this collection you will find modules
that cover basic geometry (vectors, tensors, transformations, vector
and tensor fields), quaternions, automatic derivatives, (linear)
interpolation, polynomials, elementary statistics, nonlinear
least-squares fits, unit calculations, Fortran-compatible text
formatting, 3D visualization via VRML, and two Tk widgets for simple
line plots and 3D wireframe models.""",
       author = "<NAME>",
       author_email = "<EMAIL>",
       url = "http://dirac.cnrs-orleans.fr/ScientificPython/",
       license = "CeCILL",
       packages = packages,
       headers = headers,
       ext_package = 'Scientific.'+sys.platform,
       ext_modules = ext_modules,
       scripts = scripts,
       data_files = data_files,
       cmdclass = cmdclass,
       options = options,
       )
| #!/usr/bin/env python
from distutils.core import setup, Extension
from distutils.command.install_headers import install_headers
import os, sys
from glob import glob
# --- package metadata and numeric-backend selection (Python 2 script) ---
class Dummy:
    pass
# Load Scientific/__pkginfo__.py (e.g. __version__) into a throwaway
# namespace object instead of importing the package being built.
pkginfo = Dummy()
execfile('Scientific/__pkginfo__.py', pkginfo.__dict__)
# Accumulators filled in below and finally handed to setup().
extra_compile_args = []
arrayobject_h_include = []
data_files = []
scripts = []
cmdclass = {}
options = {}
# Numeric backend: numpy is the default; --numeric/--numarray select the
# legacy array packages.  The flags are consumed (removed from sys.argv)
# so distutils never sees them.
use_numpy = True
use_numeric = False
use_numarray = False
if "--numpy" in sys.argv:
    sys.argv.remove("--numpy")
if "--numeric" in sys.argv:
    use_numeric = True
    use_numpy = False
    sys.argv.remove("--numeric")
if "--numarray" in sys.argv:
    use_numarray = True
    use_numpy = False
    sys.argv.remove("--numarray")
# Peek at --prefix= (deliberately NOT removed, so distutils still
# processes it too).
install_prefix = sys.prefix
for arg in sys.argv[1:]:
    if arg[:9] == "--prefix=":
        install_prefix = arg[9:]
# The C sources use these defines to select the array API at compile time.
if use_numeric:
    extra_compile_args.append("-DNUMERIC=1")
elif use_numarray:
    extra_compile_args.append("-DNUMARRAY=1")
else :
    extra_compile_args.append("-DNUMPY=1")
# Path to numpy's arrayobject.h, which lives in different places on
# Windows and POSIX installations.
if sys.platform == 'win32':
    arrayobject_h_include = [os.path.join(install_prefix,
                            "Lib/site-packages/numpy/core/include")]
else:
    arrayobject_h_include = [os.path.join(install_prefix,
                           "lib/python%s.%s/site-packages/numpy/core/include"
                            % sys.version_info [:2])]
# libm is implicit on Windows but must be linked explicitly elsewhere.
math_libraries = []
if sys.platform != 'win32':
    math_libraries.append('m')
#
# Locate netCDF library
#
# Search order for the netCDF installation:
#   1. --netcdf_prefix= command-line option (consumed from sys.argv)
#   2. NETCDF_PREFIX environment variable
#   3. conventional prefixes /usr/local, /usr, /sw
#   4. a CDAT/PCMDI cdat_info module, if importable (overrides the above)
netcdf_prefix = None
for arg in sys.argv[1:]:
    if arg[:16] == "--netcdf_prefix=":
        netcdf_prefix = arg[16:]
        sys.argv.remove(arg)
        break
# On Windows the directory holding netcdf.dll must be named explicitly.
if sys.platform == 'win32':
    netcdf_dll = None
    for arg in sys.argv[1:]:
        if arg[:13] == "--netcdf_dll=":
            netcdf_dll = arg[13:]
            sys.argv.remove(arg)
            break
if netcdf_prefix is None:
    try:
        netcdf_prefix=os.environ['NETCDF_PREFIX']
    except KeyError:
        pass
if netcdf_prefix is None:
    # Probe standard prefixes for include/netcdf.h.
    for netcdf_prefix in ['/usr/local', '/usr', '/sw']:
        netcdf_include = [os.path.join(netcdf_prefix, 'include'),]
        netcdf_lib = [os.path.join(netcdf_prefix, 'lib'),]
        if os.path.exists(os.path.join(netcdf_include[0], 'netcdf.h')):
            break
    else:
        netcdf_prefix = None
### PCMDI's addition for NetCDF4
netcdf_libraries = ['netcdf',]
netcdf_include = None
try:
    # When CDAT is present, its cdunif build settings take precedence.
    import cdat_info
    netcdf_include = cdat_info.cdunif_include_directories
    netcdf_lib = cdat_info.cdunif_library_directories
    netcdf_libraries = cdat_info.cdunif_libraries
    netcdf_prefix=''
    netcdf_h_file = os.path.join(netcdf_prefix, 'netcdf.h')
except:
    # NOTE(review): bare except silently swallows *any* failure while
    # importing/reading cdat_info, not just ImportError.
    pass
## END OF PCMDI adds
if netcdf_prefix is None:
    # netCDF missing: build everything except the netCDF extension.
    print "netCDF not found, the netCDF module will not be built!"
    if sys.platform != 'win32':
        print "If netCDF is installed somewhere on this computer,"
        print "please set NETCDF_PREFIX to the path where"
        print "include/netcdf.h and lib/netcdf.a are located"
        print "and re-run the build procedure."
    ext_modules = []
else:
    if sys.platform == 'win32':
        if netcdf_dll is None:
            print "Option --netcdf_dll is missing"
            raise SystemExit
        netcdf_include = [netcdf_prefix,]
        netcdf_h_file = os.path.join(netcdf_prefix, 'netcdf.h')
        netcdf_lib = [netcdf_dll,]
        # Ship netcdf.dll plus a post-install script with the installer.
        data_files.append(('DLLs', [os.path.join(netcdf_dll, 'netcdf.dll')]))
        scripts.append('scientific_win32_postinstall.py')
        options['bdist_wininst'] = {'install_script': "scientific_win32_postinstall.py"}
    else:
        ## PCMDI modified for opendap
        if netcdf_prefix != '':
            print "Using netCDF installation in ", netcdf_prefix
            netcdf_include = [os.path.join(netcdf_prefix, 'include'),]
            netcdf_h_file = os.path.join(netcdf_prefix, 'include', 'netcdf.h')
            netcdf_lib = [os.path.join(netcdf_prefix, 'lib'),]
        else:
            # Empty prefix means the cdat_info branch above supplied paths.
            print "Using netCDF installation in ", netcdf_prefix
            netcdf_h_file = os.path.join(netcdf_include[1], 'netcdf.h')
            print 'NC File:',netcdf_h_file
        ##end
    ext_modules = [Extension('Scientific_netcdf',
                             ['Src/Scientific_netcdf.c'],
                             include_dirs=['Include'] + netcdf_include + arrayobject_h_include,
                             library_dirs=netcdf_lib,
                             libraries = ['netcdf'],
                             extra_compile_args=extra_compile_args)]
try:
    # Add code for including documentation in Mac packages
    import bdist_mpkg
    from distutils.command.bdist_mpkg import bdist_mpkg as bdist_mpkg
    class my_bdist_mpkg(bdist_mpkg):
        # Extends the Mac .mpkg builder with optional Examples and Doc
        # sub-packages installed under /Developer/Python/ScientificPython.
        def initialize_options(self):
            bdist_mpkg.initialize_options(self)
            self.scheme_descriptions['examples'] = u'(Optional) ScientificPython example code'
            self.scheme_map['examples'] = '/Developer/Python/ScientificPython/Examples'
            self.scheme_copy['examples'] = 'Examples'
            self.scheme_descriptions['doc'] = u'(Optional) ScientificPython documentation'
            self.scheme_map['doc'] = '/Developer/Python/ScientificPython/Documentation'
            self.scheme_copy['doc'] = 'Doc'
    cmdclass['bdist_mpkg'] = my_bdist_mpkg
except ImportError:
    # bdist_mpkg not installed -- skip the Mac-specific packaging support.
    pass
# Pure-Python subpackages to install.
packages = ['Scientific', 'Scientific.Clustering', 'Scientific.Functions',
            'Scientific.Geometry', 'Scientific.IO',
            'Scientific.Physics', 'Scientific.QtWidgets',
            'Scientific.Statistics', 'Scientific.Signals',
            'Scientific.Threading', 'Scientific.TkWidgets',
            'Scientific.Visualization', 'Scientific.MPI',
            'Scientific.DistributedComputing']
# C extension modules (in addition to the optional netCDF one above).
ext_modules.append(Extension('Scientific_vector',
                             ['Src/Scientific_vector.c'],
                             include_dirs=['Include']+arrayobject_h_include,
                             libraries=math_libraries,
                             extra_compile_args=extra_compile_args))
ext_modules.append(Extension('Scientific_affinitypropagation',
                             ['Src/Scientific_affinitypropagation.c'],
                             include_dirs=['Include']+arrayobject_h_include,
                             libraries=math_libraries,
                             extra_compile_args=extra_compile_args))
ext_modules.append(Extension('Scientific_numerics_package_id',
                             ['Src/Scientific_numerics_package_id.c'],
                             include_dirs=['Include']+arrayobject_h_include,
                             extra_compile_args=extra_compile_args))
ext_modules.append(Extension('Scientific_interpolation',
                             ['Src/Scientific_interpolation.c'],
                             include_dirs=['Include']+arrayobject_h_include,
                             libraries=math_libraries,
                             extra_compile_args=extra_compile_args))
scripts.append('task_manager')
# The BSP subpackage needs Python >= 2.1.
# NOTE(review): the string comparison sys.version[:3] >= '2.1' misbehaves
# for two-digit minor versions (e.g. '2.10'); kept as-is.
if sys.version[:3] >= '2.1':
    packages.append('Scientific.BSP')
    scripts.append('bsp_virtual')
class modified_install_headers(install_headers):
    # Redirect header installation into .../include/Scientific instead of
    # the default per-package directory.
    def finalize_options(self):
        install_headers.finalize_options(self)
        self.install_dir = \
            os.path.join(os.path.split(self.install_dir)[0], 'Scientific')
cmdclass['install_headers'] = modified_install_headers
headers = glob(os.path.join ("Include","Scientific","*.h"))
if netcdf_prefix is not None:
headers.append(netcdf_h_file)
setup (name = "ScientificPython",
version = pkginfo.__version__,
description = "Various Python modules for scientific computing",
long_description =
"""ScientificPython is a collection of Python modules that are useful
for scientific computing. In this collection you will find modules
that cover basic geometry (vectors, tensors, transformations, vector
and tensor fields), quaternions, automatic derivatives, (linear)
interpolation, polynomials, elementary statistics, nonlinear
least-squares fits, unit calculations, Fortran-compatible text
formatting, 3D visualization via VRML, and two Tk widgets for simple
line plots and 3D wireframe models.""",
author = "<NAME>",
author_email = "<EMAIL>",
url = "http://dirac.cnrs-orleans.fr/ScientificPython/",
license = "CeCILL",
packages = packages,
headers = headers,
ext_package = 'Scientific.'+sys.platform,
ext_modules = ext_modules,
scripts = scripts,
data_files = data_files,
cmdclass = cmdclass,
options = options,
) | en | 0.722248 | #!/usr/bin/env python # # Locate netCDF library # ### PCMDI's addition for NetCDF4 ## END OF PCMDI adds ## PCMDI modified for opendap ##end # Add code for including documentation in Mac packages ScientificPython is a collection of Python modules that are useful for scientific computing. In this collection you will find modules that cover basic geometry (vectors, tensors, transformations, vector and tensor fields), quaternions, automatic derivatives, (linear) interpolation, polynomials, elementary statistics, nonlinear least-squares fits, unit calculations, Fortran-compatible text formatting, 3D visualization via VRML, and two Tk widgets for simple line plots and 3D wireframe models. | 2.198155 | 2 |
c2vqa-verbs/dataset/dataset-normalize.py | andeeptoor/qar-qae | 0 | 6619137 | import os
import pandas as pd
output_dir = os.path.join('/sb-personal/cvqa/', 'data/visual-genome/8-26-2017/generated-data/')
questions_output_file = os.path.join(output_dir, 'actions_vg_expanded_dataset-v3.csv')
new_questions_output_file = os.path.join(output_dir, 'specific_relevance_actions_vg_expanded_dataset-v2.csv')
df = pd.read_csv(questions_output_file)
print len(df)
reduce_list = ['no hold found','no stand found','no sit found']
for reduce_item in reduce_list:
length = len(df[df['answer'] == reduce_item])
remove_qa_ids = df[df['answer'] == reduce_item].sample(length-2000)['qa_id'].tolist()
remove_qa_ids += [(-1 * qa) for qa in remove_qa_ids]
remove_qa_ids = set(remove_qa_ids)
df = df[~df['qa_id'].isin(remove_qa_ids)]
grouped_df = df.groupby('answer', as_index=False).count().sort_values(['image_file'])
print grouped_df[['answer','image_file']]
print len(df)
df = df.copy()
df['specific_answer'] = ''
i = 0
total = len(df)
for _,row in df[df['qa_id'] < 0].iterrows():
if i == 1000:
print 'Question: [%d/%d]' % (i,total)
qa_id = -1 * row['qa_id']
# print qa_id
specific_answer = row['answer'][3:-7]
df.loc[df['qa_id'] == qa_id, 'answer'] = 'relevant because ' + row['answer'][3:]
df.loc[df['qa_id'] == qa_id, 'specific_answer'] = specific_answer
row['specific_answer'] = specific_answer
i+=1
# print df[df['qa_id'] == qa_id]
# df[df['qa_id']==qa_id]['answer'] = answer
# print df
df.to_csv(new_questions_output_file)
# grouped_df = df.groupby('answer', as_index=False).count().sort_values(['image_file'])
# print grouped_df[['answer','image_file']]
# df.to_csv(os.path.join(output_dir, 'sub_relevance_actions_vg_expanded_dataset.csv')) | import os
import pandas as pd
output_dir = os.path.join('/sb-personal/cvqa/', 'data/visual-genome/8-26-2017/generated-data/')
questions_output_file = os.path.join(output_dir, 'actions_vg_expanded_dataset-v3.csv')
new_questions_output_file = os.path.join(output_dir, 'specific_relevance_actions_vg_expanded_dataset-v2.csv')
df = pd.read_csv(questions_output_file)
print len(df)
reduce_list = ['no hold found','no stand found','no sit found']
for reduce_item in reduce_list:
length = len(df[df['answer'] == reduce_item])
remove_qa_ids = df[df['answer'] == reduce_item].sample(length-2000)['qa_id'].tolist()
remove_qa_ids += [(-1 * qa) for qa in remove_qa_ids]
remove_qa_ids = set(remove_qa_ids)
df = df[~df['qa_id'].isin(remove_qa_ids)]
grouped_df = df.groupby('answer', as_index=False).count().sort_values(['image_file'])
print grouped_df[['answer','image_file']]
print len(df)
df = df.copy()
df['specific_answer'] = ''
i = 0
total = len(df)
for _,row in df[df['qa_id'] < 0].iterrows():
if i == 1000:
print 'Question: [%d/%d]' % (i,total)
qa_id = -1 * row['qa_id']
# print qa_id
specific_answer = row['answer'][3:-7]
df.loc[df['qa_id'] == qa_id, 'answer'] = 'relevant because ' + row['answer'][3:]
df.loc[df['qa_id'] == qa_id, 'specific_answer'] = specific_answer
row['specific_answer'] = specific_answer
i+=1
# print df[df['qa_id'] == qa_id]
# df[df['qa_id']==qa_id]['answer'] = answer
# print df
df.to_csv(new_questions_output_file)
# grouped_df = df.groupby('answer', as_index=False).count().sort_values(['image_file'])
# print grouped_df[['answer','image_file']]
# df.to_csv(os.path.join(output_dir, 'sub_relevance_actions_vg_expanded_dataset.csv')) | en | 0.309089 | # print qa_id # print df[df['qa_id'] == qa_id] # df[df['qa_id']==qa_id]['answer'] = answer # print df # grouped_df = df.groupby('answer', as_index=False).count().sort_values(['image_file']) # print grouped_df[['answer','image_file']] # df.to_csv(os.path.join(output_dir, 'sub_relevance_actions_vg_expanded_dataset.csv')) | 2.762593 | 3 |
asf_tools/composite.py | jhkennedy/asf-tools | 2 | 6619138 | """Create a local-resolution-weighted composite from Sentinel-1 RTC products.
Create a local-resolution-weighted composite from a set of Sentinel-1 RTC
products (<NAME>, 2012). The local resolution, defined as the inverse of the
local contributing (scattering) area, is used to weight each RTC products'
contributions to the composite image on a pixel-by-pixel basis. The composite image
is created as a Cloud Optimized GeoTIFF (COG). Additionally, a COG specifying
the number of rasters contributing to each composite pixel is created.
References:
<NAME>, 2012: <https://doi.org/10.1109/IGARSS.2012.6350465>
"""
import argparse
import logging
import os
import sys
from pathlib import Path
from statistics import multimode
from tempfile import NamedTemporaryFile, TemporaryDirectory
from typing import List, Union
import numpy as np
from osgeo import gdal, osr
gdal.UseExceptions()
log = logging.getLogger(__name__)
def get_epsg_code(info: dict) -> int:
"""Get the EPSG code from a GDAL Info dictionary
Args:
info: The dictionary returned by a gdal.Info call
Returns:
epsg_code: The integer EPSG code
"""
proj = osr.SpatialReference(info['coordinateSystem']['wkt'])
epsg_code = int(proj.GetAttrValue('AUTHORITY', 1))
return epsg_code
def epsg_to_wkt(epsg_code: int) -> str:
"""Get the WKT representation of a projection from its EPSG code
Args:
epsg_code: The integer EPSG code
Returns:
wkt: The WKT representation of the projection
"""
srs = osr.SpatialReference()
srs.ImportFromEPSG(epsg_code)
return srs.ExportToWkt()
def get_target_epsg_code(codes: List[int]) -> int:
"""Determine the target UTM EPSG projection for the output composite
Args:
codes: List of UTM EPSG codes
Returns:
target: UTM EPSG code
"""
# use median east/west UTM zone of all files, regardless of hemisphere
# UTM EPSG codes for each hemisphere will look like:
# North: 326XX
# South: 327XX
valid_codes = list(range(32601, 32661)) + list(range(32701, 32761))
if bad_codes := set(codes) - set(valid_codes):
raise ValueError(f'Non UTM EPSG code encountered: {bad_codes}')
hemispheres = [c // 100 * 100 for c in codes]
# if even modes, choose lowest (North)
target_hemisphere = min(multimode(hemispheres))
zones = sorted([c % 100 for c in codes])
# if even length, choose fist of median two
target_zone = zones[(len(zones) - 1) // 2]
return target_hemisphere + target_zone
def get_area_raster(raster: str) -> str:
"""Determine the path of the area raster for a given backscatter raster based on naming conventions for HyP3 RTC
products
Args:
raster: path of the backscatter raster, e.g. S1A_IW_20181102T155531_DVP_RTC30_G_gpuned_5685_VV.tif
Returns:
area_raster: path of the area raster, e.g. S1A_IW_20181102T155531_DVP_RTC30_G_gpuned_5685_area.tif
"""
return '_'.join(raster.split('_')[:-1] + ['area.tif'])
def get_full_extent(raster_info: dict):
"""Determine the corner coordinates and geotransform for the full extent of a set of rasters
Args:
raster_info: A dictionary of gdal.Info results for the set of rasters
Returns:
upper_left: The upper left corner of the extent as a tuple
upper_right: The lower right corner of the extent as a tuple
geotransform: The geotransform of the extent as a list
"""
upper_left_corners = [info['cornerCoordinates']['upperLeft'] for info in raster_info.values()]
lower_right_corners = [info['cornerCoordinates']['lowerRight'] for info in raster_info.values()]
ulx = min([ul[0] for ul in upper_left_corners])
uly = max([ul[1] for ul in upper_left_corners])
lrx = max([lr[0] for lr in lower_right_corners])
lry = min([lr[1] for lr in lower_right_corners])
log.debug(f'Full extent raster upper left: ({ulx, uly}); lower right: ({lrx, lry})')
trans = []
for info in raster_info.values():
# Only need info from any one raster
trans = info['geoTransform']
break
trans[0] = ulx
trans[3] = uly
return (ulx, uly), (lrx, lry), trans
def reproject_to_target(raster_info: dict, target_epsg_code: int, target_resolution: float, directory: str) -> dict:
"""Reprojects a set of raster images to a common projection and resolution
Args:
raster_info: A dictionary of gdal.Info results for the set of rasters
target_epsg_code: The integer EPSG code for the target projection
target_resolution: The target resolution
directory: The directory in which to create the reprojected files
Returns:
target_raster_info: An updated dictionary of gdal.Info results for the reprojected files
"""
target_raster_info = {}
for raster, info in raster_info.items():
epsg_code = get_epsg_code(info)
resolution = info['geoTransform'][1]
if epsg_code != target_epsg_code or resolution != target_resolution:
log.info(f'Reprojecting {raster}')
reprojected_raster = os.path.join(directory, os.path.basename(raster))
gdal.Warp(
reprojected_raster, raster, dstSRS=f'EPSG:{target_epsg_code}',
xRes=target_resolution, yRes=target_resolution, targetAlignedPixels=True
)
area_raster = get_area_raster(raster)
log.info(f'Reprojecting {area_raster}')
reprojected_area_raster = os.path.join(directory, os.path.basename(area_raster))
gdal.Warp(
reprojected_area_raster, area_raster, dstSRS=f'EPSG:{target_epsg_code}',
xRes=target_resolution, yRes=target_resolution, targetAlignedPixels=True
)
target_raster_info[reprojected_raster] = gdal.Info(reprojected_raster, format='json')
else:
log.info(f'No need to reproject {raster}')
target_raster_info[raster] = info
return target_raster_info
def read_as_array(raster: str, band: int = 1) -> np.array:
"""Reads data from a raster image into memory
Args:
raster: The file path to a raster image
band: The raster band to read
Returns:
data: The raster pixel data as a numpy array
"""
log.debug(f'Reading raster values from {raster}')
ds = gdal.Open(raster)
data = ds.GetRasterBand(band).ReadAsArray()
del ds # How to close w/ gdal
return data
def write_cog(file_name: Union[str, Path], data: np.ndarray, transform: List[float], epsg_code: int,
dtype=gdal.GDT_Float32, nodata_value=None):
"""Creates a Cloud Optimized GeoTIFF
Args:
file_name: The output file name
data: The raster data
transform: The geotransform for the output GeoTIFF
epsg_code: The integer EPSG code for the output GeoTIFF projection
dtype: The pixel data type for the output GeoTIFF
nodata_value: The NODATA value for the output Geotiff
Returns:
file_name: The output file name
"""
log.info(f'Creating {file_name}')
with NamedTemporaryFile() as temp_file:
driver = gdal.GetDriverByName('GTiff')
temp_geotiff = driver.Create(temp_file.name, data.shape[1], data.shape[0], 1, dtype)
temp_geotiff.GetRasterBand(1).WriteArray(data)
if nodata_value is not None:
temp_geotiff.GetRasterBand(1).SetNoDataValue(nodata_value)
temp_geotiff.SetGeoTransform(transform)
temp_geotiff.SetProjection(epsg_to_wkt(epsg_code))
driver = gdal.GetDriverByName('COG')
options = ['COMPRESS=LZW', 'OVERVIEW_RESAMPLING=AVERAGE', 'NUM_THREADS=ALL_CPUS', 'BIGTIFF=YES']
driver.CreateCopy(str(file_name), temp_geotiff, options=options)
del temp_geotiff # How to close w/ gdal
return file_name
def make_composite(out_name: str, rasters: List[str], resolution: float = None):
"""Creates a local-resolution-weighted composite from Sentinel-1 RTC products
Args:
out_name: The base name of the output GeoTIFFs
rasters: A list of file paths of the images to composite
resolution: The pixel size for the output GeoTIFFs
Returns:
out_raster: Path to the created composite backscatter GeoTIFF
out_counts_raster: Path to the created GeoTIFF with counts of scenes contributing to each pixel
"""
if not rasters:
raise ValueError('Must specify at least one raster to composite')
raster_info = {}
for raster in rasters:
raster_info[raster] = gdal.Info(raster, format='json')
# make sure gdal can read the area raster
gdal.Info(get_area_raster(raster))
target_epsg_code = get_target_epsg_code([get_epsg_code(info) for info in raster_info.values()])
log.debug(f'Composite projection is EPSG:{target_epsg_code}')
if resolution is None:
resolution = max([info['geoTransform'][1] for info in raster_info.values()])
log.debug(f'Composite resolution is {resolution} meters')
# resample rasters to maximum resolution & common UTM zone
with TemporaryDirectory(prefix='reprojected_') as temp_dir:
raster_info = reproject_to_target(raster_info, target_epsg_code=target_epsg_code, target_resolution=resolution,
directory=temp_dir)
# Get extent of union of all images
full_ul, full_lr, full_trans = get_full_extent(raster_info)
nx = int(abs(full_ul[0] - full_lr[0]) // resolution)
ny = int(abs(full_ul[1] - full_lr[1]) // resolution)
outputs = np.zeros((ny, nx))
weights = np.zeros(outputs.shape)
counts = np.zeros(outputs.shape, dtype=np.int8)
for raster, info in raster_info.items():
log.info(f'Processing raster {raster}')
log.debug(f"Raster upper left: {info['cornerCoordinates']['upperLeft']}; "
f"lower right: {info['cornerCoordinates']['lowerRight']}")
values = read_as_array(raster)
area_raster = get_area_raster(raster)
areas = read_as_array(area_raster)
ulx, uly = info['cornerCoordinates']['upperLeft']
y_index_start = int((full_ul[1] - uly) // resolution)
y_index_end = y_index_start + values.shape[0]
x_index_start = int((ulx - full_ul[0]) // resolution)
x_index_end = x_index_start + values.shape[1]
log.debug(
f'Placing values in output grid at {y_index_start}:{y_index_end} and {x_index_start}:{x_index_end}'
)
mask = values == 0
raster_weights = 1.0 / areas
raster_weights[mask] = 0
outputs[y_index_start:y_index_end, x_index_start:x_index_end] += values * raster_weights
weights[y_index_start:y_index_end, x_index_start:x_index_end] += raster_weights
counts[y_index_start:y_index_end, x_index_start:x_index_end] += ~mask
del values, areas, mask, raster_weights
# Divide by the total weight applied
outputs /= weights
del weights
out_raster = write_cog(f'{out_name}.tif', outputs, full_trans, target_epsg_code, nodata_value=0)
del outputs
out_counts_raster = write_cog(f'{out_name}_counts.tif', counts, full_trans, target_epsg_code, dtype=gdal.GDT_Int16)
del counts
return out_raster, out_counts_raster
def main():
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('out_name', help='Base name of output composite GeoTIFF (without extension)')
parser.add_argument('rasters', nargs='+', help='Sentinel-1 GeoTIFF rasters to composite')
parser.add_argument('-r', '--resolution', type=float,
help='Desired output resolution in meters '
'(default is the max resolution of all the input files)')
parser.add_argument('-v', '--verbose', action='store_true', help='Turn on verbose logging')
args = parser.parse_args()
level = logging.DEBUG if args.verbose else logging.INFO
logging.basicConfig(stream=sys.stdout, format='%(asctime)s - %(levelname)s - %(message)s', level=level)
log.debug(' '.join(sys.argv))
log.info(f'Creating a composite of {len(args.rasters)} rasters')
raster, counts = make_composite(args.out_name, args.rasters, args.resolution)
log.info(f'Composite created successfully: {raster}')
log.info(f'Number of rasters contributing to each pixel: {counts}')
| """Create a local-resolution-weighted composite from Sentinel-1 RTC products.
Create a local-resolution-weighted composite from a set of Sentinel-1 RTC
products (<NAME>, 2012). The local resolution, defined as the inverse of the
local contributing (scattering) area, is used to weight each RTC products'
contributions to the composite image on a pixel-by-pixel basis. The composite image
is created as a Cloud Optimized GeoTIFF (COG). Additionally, a COG specifying
the number of rasters contributing to each composite pixel is created.
References:
<NAME>, 2012: <https://doi.org/10.1109/IGARSS.2012.6350465>
"""
import argparse
import logging
import os
import sys
from pathlib import Path
from statistics import multimode
from tempfile import NamedTemporaryFile, TemporaryDirectory
from typing import List, Union
import numpy as np
from osgeo import gdal, osr
gdal.UseExceptions()
log = logging.getLogger(__name__)
def get_epsg_code(info: dict) -> int:
"""Get the EPSG code from a GDAL Info dictionary
Args:
info: The dictionary returned by a gdal.Info call
Returns:
epsg_code: The integer EPSG code
"""
proj = osr.SpatialReference(info['coordinateSystem']['wkt'])
epsg_code = int(proj.GetAttrValue('AUTHORITY', 1))
return epsg_code
def epsg_to_wkt(epsg_code: int) -> str:
"""Get the WKT representation of a projection from its EPSG code
Args:
epsg_code: The integer EPSG code
Returns:
wkt: The WKT representation of the projection
"""
srs = osr.SpatialReference()
srs.ImportFromEPSG(epsg_code)
return srs.ExportToWkt()
def get_target_epsg_code(codes: List[int]) -> int:
"""Determine the target UTM EPSG projection for the output composite
Args:
codes: List of UTM EPSG codes
Returns:
target: UTM EPSG code
"""
# use median east/west UTM zone of all files, regardless of hemisphere
# UTM EPSG codes for each hemisphere will look like:
# North: 326XX
# South: 327XX
valid_codes = list(range(32601, 32661)) + list(range(32701, 32761))
if bad_codes := set(codes) - set(valid_codes):
raise ValueError(f'Non UTM EPSG code encountered: {bad_codes}')
hemispheres = [c // 100 * 100 for c in codes]
# if even modes, choose lowest (North)
target_hemisphere = min(multimode(hemispheres))
zones = sorted([c % 100 for c in codes])
# if even length, choose fist of median two
target_zone = zones[(len(zones) - 1) // 2]
return target_hemisphere + target_zone
def get_area_raster(raster: str) -> str:
"""Determine the path of the area raster for a given backscatter raster based on naming conventions for HyP3 RTC
products
Args:
raster: path of the backscatter raster, e.g. S1A_IW_20181102T155531_DVP_RTC30_G_gpuned_5685_VV.tif
Returns:
area_raster: path of the area raster, e.g. S1A_IW_20181102T155531_DVP_RTC30_G_gpuned_5685_area.tif
"""
return '_'.join(raster.split('_')[:-1] + ['area.tif'])
def get_full_extent(raster_info: dict):
"""Determine the corner coordinates and geotransform for the full extent of a set of rasters
Args:
raster_info: A dictionary of gdal.Info results for the set of rasters
Returns:
upper_left: The upper left corner of the extent as a tuple
upper_right: The lower right corner of the extent as a tuple
geotransform: The geotransform of the extent as a list
"""
upper_left_corners = [info['cornerCoordinates']['upperLeft'] for info in raster_info.values()]
lower_right_corners = [info['cornerCoordinates']['lowerRight'] for info in raster_info.values()]
ulx = min([ul[0] for ul in upper_left_corners])
uly = max([ul[1] for ul in upper_left_corners])
lrx = max([lr[0] for lr in lower_right_corners])
lry = min([lr[1] for lr in lower_right_corners])
log.debug(f'Full extent raster upper left: ({ulx, uly}); lower right: ({lrx, lry})')
trans = []
for info in raster_info.values():
# Only need info from any one raster
trans = info['geoTransform']
break
trans[0] = ulx
trans[3] = uly
return (ulx, uly), (lrx, lry), trans
def reproject_to_target(raster_info: dict, target_epsg_code: int, target_resolution: float, directory: str) -> dict:
"""Reprojects a set of raster images to a common projection and resolution
Args:
raster_info: A dictionary of gdal.Info results for the set of rasters
target_epsg_code: The integer EPSG code for the target projection
target_resolution: The target resolution
directory: The directory in which to create the reprojected files
Returns:
target_raster_info: An updated dictionary of gdal.Info results for the reprojected files
"""
target_raster_info = {}
for raster, info in raster_info.items():
epsg_code = get_epsg_code(info)
resolution = info['geoTransform'][1]
if epsg_code != target_epsg_code or resolution != target_resolution:
log.info(f'Reprojecting {raster}')
reprojected_raster = os.path.join(directory, os.path.basename(raster))
gdal.Warp(
reprojected_raster, raster, dstSRS=f'EPSG:{target_epsg_code}',
xRes=target_resolution, yRes=target_resolution, targetAlignedPixels=True
)
area_raster = get_area_raster(raster)
log.info(f'Reprojecting {area_raster}')
reprojected_area_raster = os.path.join(directory, os.path.basename(area_raster))
gdal.Warp(
reprojected_area_raster, area_raster, dstSRS=f'EPSG:{target_epsg_code}',
xRes=target_resolution, yRes=target_resolution, targetAlignedPixels=True
)
target_raster_info[reprojected_raster] = gdal.Info(reprojected_raster, format='json')
else:
log.info(f'No need to reproject {raster}')
target_raster_info[raster] = info
return target_raster_info
def read_as_array(raster: str, band: int = 1) -> np.array:
"""Reads data from a raster image into memory
Args:
raster: The file path to a raster image
band: The raster band to read
Returns:
data: The raster pixel data as a numpy array
"""
log.debug(f'Reading raster values from {raster}')
ds = gdal.Open(raster)
data = ds.GetRasterBand(band).ReadAsArray()
del ds # How to close w/ gdal
return data
def write_cog(file_name: Union[str, Path], data: np.ndarray, transform: List[float], epsg_code: int,
dtype=gdal.GDT_Float32, nodata_value=None):
"""Creates a Cloud Optimized GeoTIFF
Args:
file_name: The output file name
data: The raster data
transform: The geotransform for the output GeoTIFF
epsg_code: The integer EPSG code for the output GeoTIFF projection
dtype: The pixel data type for the output GeoTIFF
nodata_value: The NODATA value for the output Geotiff
Returns:
file_name: The output file name
"""
log.info(f'Creating {file_name}')
with NamedTemporaryFile() as temp_file:
driver = gdal.GetDriverByName('GTiff')
temp_geotiff = driver.Create(temp_file.name, data.shape[1], data.shape[0], 1, dtype)
temp_geotiff.GetRasterBand(1).WriteArray(data)
if nodata_value is not None:
temp_geotiff.GetRasterBand(1).SetNoDataValue(nodata_value)
temp_geotiff.SetGeoTransform(transform)
temp_geotiff.SetProjection(epsg_to_wkt(epsg_code))
driver = gdal.GetDriverByName('COG')
options = ['COMPRESS=LZW', 'OVERVIEW_RESAMPLING=AVERAGE', 'NUM_THREADS=ALL_CPUS', 'BIGTIFF=YES']
driver.CreateCopy(str(file_name), temp_geotiff, options=options)
del temp_geotiff # How to close w/ gdal
return file_name
def make_composite(out_name: str, rasters: List[str], resolution: float = None):
"""Creates a local-resolution-weighted composite from Sentinel-1 RTC products
Args:
out_name: The base name of the output GeoTIFFs
rasters: A list of file paths of the images to composite
resolution: The pixel size for the output GeoTIFFs
Returns:
out_raster: Path to the created composite backscatter GeoTIFF
out_counts_raster: Path to the created GeoTIFF with counts of scenes contributing to each pixel
"""
if not rasters:
raise ValueError('Must specify at least one raster to composite')
raster_info = {}
for raster in rasters:
raster_info[raster] = gdal.Info(raster, format='json')
# make sure gdal can read the area raster
gdal.Info(get_area_raster(raster))
target_epsg_code = get_target_epsg_code([get_epsg_code(info) for info in raster_info.values()])
log.debug(f'Composite projection is EPSG:{target_epsg_code}')
if resolution is None:
resolution = max([info['geoTransform'][1] for info in raster_info.values()])
log.debug(f'Composite resolution is {resolution} meters')
# resample rasters to maximum resolution & common UTM zone
with TemporaryDirectory(prefix='reprojected_') as temp_dir:
raster_info = reproject_to_target(raster_info, target_epsg_code=target_epsg_code, target_resolution=resolution,
directory=temp_dir)
# Get extent of union of all images
full_ul, full_lr, full_trans = get_full_extent(raster_info)
nx = int(abs(full_ul[0] - full_lr[0]) // resolution)
ny = int(abs(full_ul[1] - full_lr[1]) // resolution)
outputs = np.zeros((ny, nx))
weights = np.zeros(outputs.shape)
counts = np.zeros(outputs.shape, dtype=np.int8)
for raster, info in raster_info.items():
log.info(f'Processing raster {raster}')
log.debug(f"Raster upper left: {info['cornerCoordinates']['upperLeft']}; "
f"lower right: {info['cornerCoordinates']['lowerRight']}")
values = read_as_array(raster)
area_raster = get_area_raster(raster)
areas = read_as_array(area_raster)
ulx, uly = info['cornerCoordinates']['upperLeft']
y_index_start = int((full_ul[1] - uly) // resolution)
y_index_end = y_index_start + values.shape[0]
x_index_start = int((ulx - full_ul[0]) // resolution)
x_index_end = x_index_start + values.shape[1]
log.debug(
f'Placing values in output grid at {y_index_start}:{y_index_end} and {x_index_start}:{x_index_end}'
)
mask = values == 0
raster_weights = 1.0 / areas
raster_weights[mask] = 0
outputs[y_index_start:y_index_end, x_index_start:x_index_end] += values * raster_weights
weights[y_index_start:y_index_end, x_index_start:x_index_end] += raster_weights
counts[y_index_start:y_index_end, x_index_start:x_index_end] += ~mask
del values, areas, mask, raster_weights
# Divide by the total weight applied
outputs /= weights
del weights
out_raster = write_cog(f'{out_name}.tif', outputs, full_trans, target_epsg_code, nodata_value=0)
del outputs
out_counts_raster = write_cog(f'{out_name}_counts.tif', counts, full_trans, target_epsg_code, dtype=gdal.GDT_Int16)
del counts
return out_raster, out_counts_raster
def main():
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument('out_name', help='Base name of output composite GeoTIFF (without extension)')
parser.add_argument('rasters', nargs='+', help='Sentinel-1 GeoTIFF rasters to composite')
parser.add_argument('-r', '--resolution', type=float,
help='Desired output resolution in meters '
'(default is the max resolution of all the input files)')
parser.add_argument('-v', '--verbose', action='store_true', help='Turn on verbose logging')
args = parser.parse_args()
level = logging.DEBUG if args.verbose else logging.INFO
logging.basicConfig(stream=sys.stdout, format='%(asctime)s - %(levelname)s - %(message)s', level=level)
log.debug(' '.join(sys.argv))
log.info(f'Creating a composite of {len(args.rasters)} rasters')
raster, counts = make_composite(args.out_name, args.rasters, args.resolution)
log.info(f'Composite created successfully: {raster}')
log.info(f'Number of rasters contributing to each pixel: {counts}')
| en | 0.735181 | Create a local-resolution-weighted composite from Sentinel-1 RTC products. Create a local-resolution-weighted composite from a set of Sentinel-1 RTC products (<NAME>, 2012). The local resolution, defined as the inverse of the local contributing (scattering) area, is used to weight each RTC products' contributions to the composite image on a pixel-by-pixel basis. The composite image is created as a Cloud Optimized GeoTIFF (COG). Additionally, a COG specifying the number of rasters contributing to each composite pixel is created. References: <NAME>, 2012: <https://doi.org/10.1109/IGARSS.2012.6350465> Get the EPSG code from a GDAL Info dictionary Args: info: The dictionary returned by a gdal.Info call Returns: epsg_code: The integer EPSG code Get the WKT representation of a projection from its EPSG code Args: epsg_code: The integer EPSG code Returns: wkt: The WKT representation of the projection Determine the target UTM EPSG projection for the output composite Args: codes: List of UTM EPSG codes Returns: target: UTM EPSG code # use median east/west UTM zone of all files, regardless of hemisphere # UTM EPSG codes for each hemisphere will look like: # North: 326XX # South: 327XX # if even modes, choose lowest (North) # if even length, choose fist of median two Determine the path of the area raster for a given backscatter raster based on naming conventions for HyP3 RTC products Args: raster: path of the backscatter raster, e.g. S1A_IW_20181102T155531_DVP_RTC30_G_gpuned_5685_VV.tif Returns: area_raster: path of the area raster, e.g. 
S1A_IW_20181102T155531_DVP_RTC30_G_gpuned_5685_area.tif Determine the corner coordinates and geotransform for the full extent of a set of rasters Args: raster_info: A dictionary of gdal.Info results for the set of rasters Returns: upper_left: The upper left corner of the extent as a tuple upper_right: The lower right corner of the extent as a tuple geotransform: The geotransform of the extent as a list # Only need info from any one raster Reprojects a set of raster images to a common projection and resolution Args: raster_info: A dictionary of gdal.Info results for the set of rasters target_epsg_code: The integer EPSG code for the target projection target_resolution: The target resolution directory: The directory in which to create the reprojected files Returns: target_raster_info: An updated dictionary of gdal.Info results for the reprojected files Reads data from a raster image into memory Args: raster: The file path to a raster image band: The raster band to read Returns: data: The raster pixel data as a numpy array # How to close w/ gdal Creates a Cloud Optimized GeoTIFF Args: file_name: The output file name data: The raster data transform: The geotransform for the output GeoTIFF epsg_code: The integer EPSG code for the output GeoTIFF projection dtype: The pixel data type for the output GeoTIFF nodata_value: The NODATA value for the output Geotiff Returns: file_name: The output file name # How to close w/ gdal Creates a local-resolution-weighted composite from Sentinel-1 RTC products Args: out_name: The base name of the output GeoTIFFs rasters: A list of file paths of the images to composite resolution: The pixel size for the output GeoTIFFs Returns: out_raster: Path to the created composite backscatter GeoTIFF out_counts_raster: Path to the created GeoTIFF with counts of scenes contributing to each pixel # make sure gdal can read the area raster # resample rasters to maximum resolution & common UTM zone # Get extent of union of all images # Divide by the total 
weight applied | 2.892535 | 3 |
09_files/Curso_Python_3/io_v4.py | smartao/estudos_python | 0 | 6619139 | <gh_stars>0
#!/usr/bin/python3
#
'''
Leitura via stream
Usando o try finally
O finally sempre é executado mesmo quando ocorrer um erro no codigo
Mesmo se existir o except o finally sera executado posteriomente
'''
try:
arquivo = open('pessoas.csv')
for registro in arquivo:
print('Nome: {}, Idade: {}'.format(*registro.strip().split(',')))
finally:
arquivo.close()
if arquivo.closed:
print('Arquivo já foi fechado!')
# Fontes:
# Curso Python 3 - Curso Completo do Básico ao Avançado Udemy Aula 97 a 107
# https://github.com/cod3rcursos/curso-python/tree/master/manipulacao_arquivos
| #!/usr/bin/python3
#
'''
Leitura via stream
Usando o try finally
O finally sempre é executado mesmo quando ocorrer um erro no codigo
Mesmo se existir o except o finally sera executado posteriomente
'''
try:
arquivo = open('pessoas.csv')
for registro in arquivo:
print('Nome: {}, Idade: {}'.format(*registro.strip().split(',')))
finally:
arquivo.close()
if arquivo.closed:
print('Arquivo já foi fechado!')
# Fontes:
# Curso Python 3 - Curso Completo do Básico ao Avançado Udemy Aula 97 a 107
# https://github.com/cod3rcursos/curso-python/tree/master/manipulacao_arquivos | pt | 0.807627 | #!/usr/bin/python3 # Leitura via stream Usando o try finally O finally sempre é executado mesmo quando ocorrer um erro no codigo Mesmo se existir o except o finally sera executado posteriomente # Fontes: # Curso Python 3 - Curso Completo do Básico ao Avançado Udemy Aula 97 a 107 # https://github.com/cod3rcursos/curso-python/tree/master/manipulacao_arquivos | 3.557856 | 4 |
models/a3c.py | schio/coin-auto-trade-for-SOSCON-2019 | 0 | 6619140 | import numpy as np
np.random.seed(7)
import tensorflow as tf
import datetime
import time
import threading
import math
import random
random.seed(7)
import os
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from keras.models import *
from keras.layers import *
from keras import backend as K
from enum import Enum
from time import sleep
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
art = """
.d8888b. 888 d88888888888
d88P Y88b 888 d88888 888
888 888 888 d88P888 888
888 888d888888 88888888b. 888888 .d88b. d88P 888 888
888 888P" 888 888888 "88b888 d88""88b d88P 888 888
888 888888 888 888888 888888 888 888 d88P 888 888
Y88b d88P888 Y88b 888888 d88PY88b. Y88..88P d8888888888 888
"Y8888P" 888 "Y8888888888P" "Y888 "Y88P" d88P 8888888888
888888 d88P
Y8b d88P888 d88P
"Y88P" 888 d88P by UmeW
****** Deep AC3 Trader ******
"""
THREADS = 8
np.set_printoptions(linewidth = 500)
# HyperParams
LOSS_V = .5 # v loss coefficient
LOSS_ENTROPY = 0.1 # entropy coefficient
LEARNING_RATE = 1e-4
EPS_START = 0.5
EPS_END = 0.1
EPS_SLOPE = 600
N_STEP_RETURN = 8
MIN_BATCH = 32
NUM_HISTORY = 300
NUM_STATE = 1 * NUM_HISTORY + 1 + 1 + 1 + 1# Scrapped data + (Shares bought?) + (Budget?)
NUM_DENSE = 120
NUM_DENSE2 = 30
GAMMA = 0.99
GAMMA_N = GAMMA ** N_STEP_RETURN
CAN_SHORT = False
NUM_ACTIONS = 3 # Buy = 0 , Sell = 1 , Hold = 2
# States Var
mdPrice = []
mdPriceMin = []
mdPriceMax = []
mdBSRatio = []
mdVolume = []
mdVar = [0] * THREADS
mdMean = [0] * THREADS
mdTimeMax = [0] * THREADS
aHistory = [[] for i in range(THREADS)]
stopSignal = False
testFile = open("result2.test", "a")
print(art)
loadData()
if False :
HP_LOSS_V = [0.5]
HP_LOSS_ENTROPY = [0.01,0.001,0.1,0.5,1,10]
HP_LEARNING_RATE = [1e-4,5e-4,1e-3,1e-2]
HP_EPS_START = [0.5,0.6,0.7,0.4,0.3]
HP_EPS_END = [0.15,0.05,0.25,0.1]
HP_EPS_SLOPE = [10, 15, 5]
HP_N_STEP_RETURN = [8]
HP_MIN_BATCH = [32,64,512]
HP_NUM_HISTORY = [1,2,3]
HP_GAMMA = [0.99]
for loss_v in HP_LOSS_V:
LOSS_V = loss_v
for eps_start in HP_EPS_START:
EPS_START = eps_start
for eps_end in HP_EPS_END:
EPS_END = eps_end
for eps_slope in HP_EPS_SLOPE:
EPS_SLOPE = eps_slope
for n_step_return in HP_N_STEP_RETURN:
N_STEP_RETURN = n_step_return
for min_batch in HP_MIN_BATCH:
MIN_BATCH = min_batch
for num_history in HP_NUM_HISTORY:
NUM_STATE = 1 * NUM_HISTORY + 1 + 1 + 1
HP_NUM_DENSE = [30, 10, 100]
for num_dense in HP_NUM_DENSE:
NUM_DENSE = num_dense
for loss_entropy in HP_LOSS_ENTROPY:
LOSS_ENTROPY = loss_entropy
for learning_rate in HP_LEARNING_RATE:
LEARNING_RATE = learning_rate
for gamma in HP_GAMMA:
GAMMA = gamma
GAMMA_N = GAMMA ** N_STEP_RETURN
result = start()
strin = ("loss_v: " + str(loss_v) +
" | loss_entropy: " + str(loss_entropy) +
" | learning_rate: " + str(learning_rate) +
" | eps_start: " + str(eps_start) +
" | eps_end: " + str(eps_end) +
" | eps_slope: " + str(eps_slope) +
" | n_step_return: " + str(n_step_return) +
" | min_batch: " + str(min_batch) +
" | num_history: " + str(num_history) +
" | num_dense: " + str(num_dense) +
" | result: " + str(result) + "\n"
)
print(strin)
else :
print(start())
testFile.close
plt.ion()
fig = plt.figure()
lines = []
prices = []
for i in range(THREADS):
x = np.arange(NUM_HISTORY - 1, mdTimeMax[i%8], 1)
priceA = mdPrice[i % 8 ][NUM_HISTORY - 1: mdTimeMax[i % 8] ]
priceA = np.array([(x - mdMean[i % 8 ])/ mdVar[i % 8 ] for x in priceA])
prices.append(priceA)
ax = fig.add_subplot(int(THREADS/2), 2, i + 1)
acts = aHistory[i][0]
fill = [-1]*(mdTimeMax[i] - NUM_HISTORY- len(acts) + 1)
actions = np.array( acts + fill ) + 1
beee,line = ax.plot(x, priceA, 'b-', x, actions, 'ro')
lines.append(line)
plt.title(str(i))
fig.canvas.draw()
k = 0
while k < 1000:
for i in range(THREADS):
if k < len(aHistory[i]):
acts = aHistory[i][k]
fill = [-1]*(mdTimeMax[i] - NUM_HISTORY - len(acts) + 1)
actions = np.array( acts + fill ) + 1
else:
acts = aHistory[i][-1]
fill = [-1]*(mdTimeMax[i] - NUM_HISTORY - len(acts) + 1)
actions = np.array( acts + fill ) + 1
lines[i].set_ydata(actions)
k += 1
t = time.time()
while time.time() < t + 1 :
fig.canvas.flush_events()
sleep(0.001)
class Action(Enum):
BUY = 0
SELL = 1
HOLD = 2
def loadData():
j = 0
for j in range(0, 8):
with open('training2/training_'+ str(j) +'.data', 'r') as f:
buf = f.readlines()
mdPrice.append([])
mdPriceMin.append([])
mdPriceMax.append([])
mdBSRatio.append([])
mdVolume.append([])
esp = 0
esp2 = 0
for line in buf: # we should test if everything good at import
dat = line.split(' ')
#>>> t = "2017-12-08 23:22:00 16066.530120481928 16060 16072 38 225691"
#['2017-12-08', '23:22:00', '16066.530120481928', '16060', '16072', '38', '225691']
mdPrice[j].append(float(dat[2]))
esp += float(dat[2])
esp2 += float(dat[2]) ** 2
mdPriceMin[j].append(float(dat[3]))
mdPriceMax[j].append(float(dat[4]))
mdBSRatio[j].append(float(dat[5]))
mdVolume[j].append(float(dat[6]))
mdTimeMax[j] = int(len(buf))
esp = esp / mdTimeMax[j]
esp2 = esp2 / mdTimeMax[j]
mdVar[j] = math.sqrt(esp2 - (esp ** 2))
mdMean[j] = esp
#print(mdVar[j])
class Brain():
def __init__(self):
g = tf.Graph()
SESSION = tf.Session(graph=g)
self.session = SESSION
with g.as_default():
tf.set_random_seed(7)
K.set_session(self.session)
K.manual_variable_initialization(True)
self.model = self.BuildModel()
self.graph = self.BuildGraph()
self.session.run(tf.global_variables_initializer())
self.default_graph = tf.get_default_graph()
#self.default_graph.finalize()
self.buffer = [[], [], [], [], []]
self.lock = threading.Lock()
def BuildModel(self):
l_input = Input(batch_shape=(None, NUM_STATE))
#l_predense = Dense(NUM_DENSE, activation='relu', kernel_regularizer=regularizers.l2(0.01))(l_input)
#l_dense = Dense(NUM_DENSE, activation='relu', kernel_regularizer=regularizers.l2(0.01))(l_predense)
l_predense = Dense(NUM_DENSE, activation='tanh')(l_input)
l_dense = Dense(NUM_DENSE, activation='tanh')(l_predense)
out_actions = Dense(NUM_ACTIONS, activation='softmax')(l_dense)
out_value = Dense(1, activation='linear')(l_dense)
model = Model(inputs=[l_input], outputs=[out_actions, out_value])
model._make_predict_function()
self.intermediateModel = Model(inputs=[l_input], outputs=[l_dense])
self.intermediateModel._make_predict_function()
return model
def BuildGraph(self):
s_t = tf.placeholder(tf.float64, shape=(None, NUM_STATE))
r_t = tf.placeholder(tf.float64, shape=(None, 1)) # r + gamma vs'
a_t = tf.placeholder(tf.float64, shape=(None, NUM_ACTIONS))
p_t, v_t = self.model(s_t)
advantage = r_t - v_t
log_prob = tf.log(tf.reduce_sum(p_t * a_t, axis=1, keep_dims=True) + 1e-10)
loss_policy = - log_prob * tf.stop_gradient(advantage)
loss_value = LOSS_V * tf.square(advantage)
entropy = LOSS_ENTROPY * tf.reduce_sum(p_t * tf.log(p_t + 1e-10), axis=1, keep_dims=True)
loss_total = tf.reduce_mean(loss_policy + loss_value + entropy)
#loss_total = tf.reduce_mean(entropy)
self.loss = loss_total
optimizer = tf.train.RMSPropOptimizer(LEARNING_RATE, decay=.99)
minimize = optimizer.minimize(loss_total)
return s_t, a_t, r_t, minimize
def getPrediction(self, s):
with self.default_graph.as_default():
#print(self.intermediateModel.predict(s))
p, v = self.model.predict(s)
#print(p)
#s_t, a_t, r_t, minimize = self.graph
#k = self.session.run(self.entropy, feed_dict={s_t: s})
#print(k)
return p, v
def getValue(self, s):
with self.default_graph.as_default():
p, v = self.model.predict(s)
return v
def getPolicy(self, s):
with self.default_graph.as_default():
p, v = self.model.predict(s)
return p
def pushTraining(self, action, reward, oldStep, newStep, threadId):
with self.lock:
act = np.zeros(NUM_ACTIONS)
act[action] = 1
self.buffer[0].append(act)
self.buffer[1].append(reward)
self.buffer[2].append(oldStep)
if newStep is None:
self.buffer[3].append(np.zeros(NUM_STATE))
self.buffer[4].append(0)
else:
self.buffer[3].append(newStep)
self.buffer[4].append(1)
def optimize(self):
if len(self.buffer[0]) > MIN_BATCH :
batch = []
with self.lock:
batch = self.buffer
self.buffer = [[], [], [], [], []]
#print(self.threadC)
s_t, a_t, r_t, minimize = self.graph
a = np.vstack(batch[0])
r = np.vstack(batch[1])
s = np.vstack(batch[2])
newStates = np.vstack(batch[3])
newStatesMask = np.vstack(batch[4])
newStatesValue = self.getValue(newStates)
rew = r + newStatesValue * GAMMA_N * newStatesMask
#x = np.hstack([s,a,r,newStatesValue,newStatesMask,rew])
#print(x)
#print(len(s))
#if len(s) > 5*MIN_BATCH: print("Optimizer alert! Minimizing batch of %d" % len(s))
#print("*************************************")
#print(s)
self.session.run(minimize, feed_dict={s_t: s, a_t: a, r_t: rew})
#for i in range(0,100):
# self.session.run(minimize, feed_dict={s_t: s, a_t: a, r_t: rew})
# k = self.session.run(self.loss, feed_dict={s_t: s, a_t: a, r_t: rew})
# print(k)
class Optimizer(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
def run(self):
while not stopSignal:
sleep(0.001)
brain.optimize()
class Actor(threading.Thread):
def __init__(self, idt, isTest):
threading.Thread.__init__(self)
self.id = idt
self.isTest = isTest
self.steps = 0
self.simCount = 0
#print("Actor " + str(idt) +" created")
def run(self):
#print("Actor " + str(self.id) +" started")
if self.isTest :
self.startSimulation()
else:
while not stopSignal:
sleep(0)
self.startSimulation()
self.simCount += 1
#print(str(self.id) + " : " + str(self.steps))
def normalizeBin(self, value):
#if value > 0:
# return [1]
#else:
# return [-1]
if value > 255 :
return [0.9]*8
r = value
ret = [0.1] * 8
i = 0
while r > 0:
x = (r % 2)*(0.8) + 0.1
ret[i] = x
r = int(r/2)
i +=1
return ret
def startSimulation(self):
if self.isTest:
self.budget = 300
elif self.simCount < 100:
self.budget = random.randint(1000, 5000) *( self.id + 1)
elif self.simCount < 200:
self.budget = random.randint(9 *(100 - self.simCount) + 1000, 45 * (100 - self.simCount) + 5000) *( self.id + 1)
else:
self.budget = 300
self.budget = random.randint(50000, 100000) *( self.id + 1)
self.initbud = self.budget
self.shares = 0
self.mem = [] # (a, t, st, r)
self.r = 0
t = NUM_HISTORY - 1
self.timeMax = mdTimeMax[self.id % 8] #random.randint(NUM_HISTORY , mdTimeMax[self.id % 8])
if self.id == 12:
t = self.simCount % ( mdTimeMax[self.id % 8] - NUM_HISTORY - 50) + NUM_HISTORY - 1
self.timeMax = t + 50
totalSteps = self.timeMax - t
self.R = 0
#print("t " + str(t) +" timemax " + str(mdTimeMax))
actions = []
self.badActions = 0
kill = False
while t < self.timeMax - 1 and not kill:
#st = ([[self.budget] + [self.shares] + mdPrice[t + 1 - NUM_HISTORY: t + 1] + mdPriceMin[t + 1 - NUM_HISTORY: t + 1] +
#mdPriceMax[t + 1 - NUM_HISTORY: t + 1] + mdBSRatio[t + 1 - NUM_HISTORY: t + 1] + mdVolume[t + 1 - NUM_HISTORY: t + 1]])
if self.budget >= mdPrice[self.id % 8][t] :
canBuy = [1]
else :
canBuy = [-1]
priceA = mdPrice[self.id % 8 ][t + 1 - NUM_HISTORY: t + 1]
priceA = [(x - mdMean[self.id % 8 ]) / mdVar[self.id % 8 ] for x in priceA]
st = [[(self.shares * mdPrice[self.id % 8 ][t])/mdVar[self.id % 8] ] + canBuy + [(self.budget )/ mdVar[self.id % 8], mdMean[self.id % 8 ]/mdVar[self.id % 8] ] + priceA]
state = np.array(st)
#if self.isTest : print(state)
a, v = self.getActionValue(state, t)
if self.id == 12:
#if self.isTest: print("self.budget: " + str(self.budget) + " - p: " + str(mdPrice[self.id % 8 ][t]))
if self.budget > mdPrice[self.id % 8 ][t] and mdPrice[self.id % 8 ][t] < mdMean[self.id % 8 ]:
a = 0
elif self.shares > 0 and mdPrice[self.id % 8 ][t] > mdMean[self.id % 8 ]:
a = 1
else :
a = 2
if self.isTest and False:
print("time: " + str(t) + " | price: " + str(mdPrice[self.id][t]))
print("budget: " + str(self.budget) + "| shares:" + str(self.shares))
print("action: " + Action(a).name)
testFile.write(str(mdPrice[self.id][t])+ " "+ str(a)+ "\n")
r = self.act(state, t, a)
self.R += r
# print("reward: " + str(r))
self.pushTraining(a,t,st,r)
#if(t%3 == 0):
while (len(brain.buffer[0]) > MIN_BATCH and not stopSignal) :
sleep(0)
t += 1
self.steps += 1
actions.append(a)
if(self.budget <= 0 and self.shares == 0):
kill = True
ratioComplete = (t*100)/(self.timeMax - 1)
aHistory[self.id].append(actions)
badActionPct = self.badActions
print("Actor " + str(self.id) +" FINISH -- REWARD: " + str(self.R)+ " -- Bad: " + str(badActionPct) + " -- SimC: " + str(self.simCount)+ " -- Comp:%" + str(ratioComplete) )
def pushTraining(self,a,t,st,r):
# a debug
self.mem.append((a,t,st,r))
self.r = (self.r + GAMMA_N * r) / GAMMA
#print("selfR: " + str(self.r))
if t == self.timeMax - 2:
while len(self.mem) > 0:
brain.pushTraining(self.mem[0][0], self.r, self.mem[0][2], None, self.id)
self.r = (self.r - self.mem[0][3])/GAMMA
self.mem.pop(0)
elif len(self.mem) == N_STEP_RETURN :
brain.pushTraining(self.mem[0][0], self.r, self.mem[0][2], st, self.id)
self.r = self.r - self.mem[0][3]
self.mem.pop(0)
#print("NselfR: " + str(self.r))
#print("\n\n")
def getActionValue(self, state, time):
eps = self.getEps(self.steps)
a, v = brain.getPrediction(state)
#print("TIME: " + str(time) + " - EPS: " + str(eps) + " - VAL: " + str(v))
if random.random() < eps:
return random.randint(0, NUM_ACTIONS - 1), v
else:
return np.argmax(a), v
def act(self, state, time, action):
action = Action(action)
oldPortfolio = self.budget + self.shares * mdPrice[self.id % 8][time]
oldBud = self.budget
#print("oldportfolioValue: " + str(oldPortfolio))
if action == Action.BUY and self.budget >= mdPrice[self.id % 8][time]:
self.shares += 1
self.budget -= mdPrice[self.id % 8][time]
elif action == Action.BUY and self.budget < mdPrice[self.id % 8][time]:
self.budget -= mdPrice[self.id % 8][time] * 3
self.badActions += 1
elif action == Action.SELL and (self.shares > 0 or CAN_SHORT):
self.budget += mdPrice[self.id % 8][time] * self.shares
self.shares = 0
elif action == Action.SELL and (self.shares <= 0 and not CAN_SHORT):
self.budget -= mdPrice[self.id % 8 ][time] * 3
self.badActions += 1
#elif action == Action.HOLD and self.shares == 0:
# self.budget -= mdPrice[self.id % 8][time] * 10
# self.badActions += 1
newPortfolio = self.budget + self.shares * mdPrice[self.id % 8][time + 1]
#print("newportfolioValue: " + str(newPortfolio))
#return newPortfolio - oldPortfolio
#return (self.budget - oldBud)/(mdVar[self.id % 8])
return newPortfolio - oldPortfolio
def getEps(self, time):
if self.isTest :
return 0.0
else:
EPS_STEP = EPS_SLOPE *(self.timeMax - NUM_HISTORY)
#print("EPS STEP : " + str(EPS_STEP))
if time >= EPS_STEP:
return EPS_END
else:
slope = (EPS_END - EPS_START) / (EPS_STEP)
eps = slope * time + EPS_START
return eps
def start():
global brain
global stopSignal
stopSignal = False
brain = Brain()
# Start Actors
actors = [Actor(i,False) for i in range(THREADS)]
for t in actors: t.start()
# Start Critics
opt = Optimizer()
opt.start()
sleep(3600)
stopSignal = True
sleep(5)
#mdTimeMax = 2 * mdTimeMax
#Test Strategy
#print("**TRAINING COMPLETE*********")
testers = [Actor(i,True) for i in range(8)]
results = [0] * 8
for t in testers: t.start()
for t in testers:
t.join()
results[t.id] = str(int(t.R))
return " ".join(results)
#for phase in np.linspace(0, 10*np.pi, 500):
# line1.set_ydata(np.sin(x + phase))
# t = 0
# while t < 1000:
# fig.canvas.flush_events()
# sleep(0.001)
# t+=1
| import numpy as np
np.random.seed(7)
import tensorflow as tf
import datetime
import time
import threading
import math
import random
random.seed(7)
import os
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from keras.models import *
from keras.layers import *
from keras import backend as K
from enum import Enum
from time import sleep
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
art = """
.d8888b. 888 d88888888888
d88P Y88b 888 d88888 888
888 888 888 d88P888 888
888 888d888888 88888888b. 888888 .d88b. d88P 888 888
888 888P" 888 888888 "88b888 d88""88b d88P 888 888
888 888888 888 888888 888888 888 888 d88P 888 888
Y88b d88P888 Y88b 888888 d88PY88b. Y88..88P d8888888888 888
"Y8888P" 888 "Y8888888888P" "Y888 "Y88P" d88P 8888888888
888888 d88P
Y8b d88P888 d88P
"Y88P" 888 d88P by UmeW
****** Deep AC3 Trader ******
"""
THREADS = 8
np.set_printoptions(linewidth = 500)
# HyperParams
LOSS_V = .5 # v loss coefficient
LOSS_ENTROPY = 0.1 # entropy coefficient
LEARNING_RATE = 1e-4
EPS_START = 0.5
EPS_END = 0.1
EPS_SLOPE = 600
N_STEP_RETURN = 8
MIN_BATCH = 32
NUM_HISTORY = 300
NUM_STATE = 1 * NUM_HISTORY + 1 + 1 + 1 + 1# Scrapped data + (Shares bought?) + (Budget?)
NUM_DENSE = 120
NUM_DENSE2 = 30
GAMMA = 0.99
GAMMA_N = GAMMA ** N_STEP_RETURN
CAN_SHORT = False
NUM_ACTIONS = 3 # Buy = 0 , Sell = 1 , Hold = 2
# States Var
mdPrice = []
mdPriceMin = []
mdPriceMax = []
mdBSRatio = []
mdVolume = []
mdVar = [0] * THREADS
mdMean = [0] * THREADS
mdTimeMax = [0] * THREADS
aHistory = [[] for i in range(THREADS)]
stopSignal = False
testFile = open("result2.test", "a")
print(art)
loadData()
if False :
HP_LOSS_V = [0.5]
HP_LOSS_ENTROPY = [0.01,0.001,0.1,0.5,1,10]
HP_LEARNING_RATE = [1e-4,5e-4,1e-3,1e-2]
HP_EPS_START = [0.5,0.6,0.7,0.4,0.3]
HP_EPS_END = [0.15,0.05,0.25,0.1]
HP_EPS_SLOPE = [10, 15, 5]
HP_N_STEP_RETURN = [8]
HP_MIN_BATCH = [32,64,512]
HP_NUM_HISTORY = [1,2,3]
HP_GAMMA = [0.99]
for loss_v in HP_LOSS_V:
LOSS_V = loss_v
for eps_start in HP_EPS_START:
EPS_START = eps_start
for eps_end in HP_EPS_END:
EPS_END = eps_end
for eps_slope in HP_EPS_SLOPE:
EPS_SLOPE = eps_slope
for n_step_return in HP_N_STEP_RETURN:
N_STEP_RETURN = n_step_return
for min_batch in HP_MIN_BATCH:
MIN_BATCH = min_batch
for num_history in HP_NUM_HISTORY:
NUM_STATE = 1 * NUM_HISTORY + 1 + 1 + 1
HP_NUM_DENSE = [30, 10, 100]
for num_dense in HP_NUM_DENSE:
NUM_DENSE = num_dense
for loss_entropy in HP_LOSS_ENTROPY:
LOSS_ENTROPY = loss_entropy
for learning_rate in HP_LEARNING_RATE:
LEARNING_RATE = learning_rate
for gamma in HP_GAMMA:
GAMMA = gamma
GAMMA_N = GAMMA ** N_STEP_RETURN
result = start()
strin = ("loss_v: " + str(loss_v) +
" | loss_entropy: " + str(loss_entropy) +
" | learning_rate: " + str(learning_rate) +
" | eps_start: " + str(eps_start) +
" | eps_end: " + str(eps_end) +
" | eps_slope: " + str(eps_slope) +
" | n_step_return: " + str(n_step_return) +
" | min_batch: " + str(min_batch) +
" | num_history: " + str(num_history) +
" | num_dense: " + str(num_dense) +
" | result: " + str(result) + "\n"
)
print(strin)
else :
print(start())
testFile.close
plt.ion()
fig = plt.figure()
lines = []
prices = []
for i in range(THREADS):
x = np.arange(NUM_HISTORY - 1, mdTimeMax[i%8], 1)
priceA = mdPrice[i % 8 ][NUM_HISTORY - 1: mdTimeMax[i % 8] ]
priceA = np.array([(x - mdMean[i % 8 ])/ mdVar[i % 8 ] for x in priceA])
prices.append(priceA)
ax = fig.add_subplot(int(THREADS/2), 2, i + 1)
acts = aHistory[i][0]
fill = [-1]*(mdTimeMax[i] - NUM_HISTORY- len(acts) + 1)
actions = np.array( acts + fill ) + 1
beee,line = ax.plot(x, priceA, 'b-', x, actions, 'ro')
lines.append(line)
plt.title(str(i))
fig.canvas.draw()
k = 0
while k < 1000:
for i in range(THREADS):
if k < len(aHistory[i]):
acts = aHistory[i][k]
fill = [-1]*(mdTimeMax[i] - NUM_HISTORY - len(acts) + 1)
actions = np.array( acts + fill ) + 1
else:
acts = aHistory[i][-1]
fill = [-1]*(mdTimeMax[i] - NUM_HISTORY - len(acts) + 1)
actions = np.array( acts + fill ) + 1
lines[i].set_ydata(actions)
k += 1
t = time.time()
while time.time() < t + 1 :
fig.canvas.flush_events()
sleep(0.001)
class Action(Enum):
BUY = 0
SELL = 1
HOLD = 2
def loadData():
j = 0
for j in range(0, 8):
with open('training2/training_'+ str(j) +'.data', 'r') as f:
buf = f.readlines()
mdPrice.append([])
mdPriceMin.append([])
mdPriceMax.append([])
mdBSRatio.append([])
mdVolume.append([])
esp = 0
esp2 = 0
for line in buf: # we should test if everything good at import
dat = line.split(' ')
#>>> t = "2017-12-08 23:22:00 16066.530120481928 16060 16072 38 225691"
#['2017-12-08', '23:22:00', '16066.530120481928', '16060', '16072', '38', '225691']
mdPrice[j].append(float(dat[2]))
esp += float(dat[2])
esp2 += float(dat[2]) ** 2
mdPriceMin[j].append(float(dat[3]))
mdPriceMax[j].append(float(dat[4]))
mdBSRatio[j].append(float(dat[5]))
mdVolume[j].append(float(dat[6]))
mdTimeMax[j] = int(len(buf))
esp = esp / mdTimeMax[j]
esp2 = esp2 / mdTimeMax[j]
mdVar[j] = math.sqrt(esp2 - (esp ** 2))
mdMean[j] = esp
#print(mdVar[j])
class Brain():
def __init__(self):
g = tf.Graph()
SESSION = tf.Session(graph=g)
self.session = SESSION
with g.as_default():
tf.set_random_seed(7)
K.set_session(self.session)
K.manual_variable_initialization(True)
self.model = self.BuildModel()
self.graph = self.BuildGraph()
self.session.run(tf.global_variables_initializer())
self.default_graph = tf.get_default_graph()
#self.default_graph.finalize()
self.buffer = [[], [], [], [], []]
self.lock = threading.Lock()
def BuildModel(self):
l_input = Input(batch_shape=(None, NUM_STATE))
#l_predense = Dense(NUM_DENSE, activation='relu', kernel_regularizer=regularizers.l2(0.01))(l_input)
#l_dense = Dense(NUM_DENSE, activation='relu', kernel_regularizer=regularizers.l2(0.01))(l_predense)
l_predense = Dense(NUM_DENSE, activation='tanh')(l_input)
l_dense = Dense(NUM_DENSE, activation='tanh')(l_predense)
out_actions = Dense(NUM_ACTIONS, activation='softmax')(l_dense)
out_value = Dense(1, activation='linear')(l_dense)
model = Model(inputs=[l_input], outputs=[out_actions, out_value])
model._make_predict_function()
self.intermediateModel = Model(inputs=[l_input], outputs=[l_dense])
self.intermediateModel._make_predict_function()
return model
def BuildGraph(self):
s_t = tf.placeholder(tf.float64, shape=(None, NUM_STATE))
r_t = tf.placeholder(tf.float64, shape=(None, 1)) # r + gamma vs'
a_t = tf.placeholder(tf.float64, shape=(None, NUM_ACTIONS))
p_t, v_t = self.model(s_t)
advantage = r_t - v_t
log_prob = tf.log(tf.reduce_sum(p_t * a_t, axis=1, keep_dims=True) + 1e-10)
loss_policy = - log_prob * tf.stop_gradient(advantage)
loss_value = LOSS_V * tf.square(advantage)
entropy = LOSS_ENTROPY * tf.reduce_sum(p_t * tf.log(p_t + 1e-10), axis=1, keep_dims=True)
loss_total = tf.reduce_mean(loss_policy + loss_value + entropy)
#loss_total = tf.reduce_mean(entropy)
self.loss = loss_total
optimizer = tf.train.RMSPropOptimizer(LEARNING_RATE, decay=.99)
minimize = optimizer.minimize(loss_total)
return s_t, a_t, r_t, minimize
def getPrediction(self, s):
with self.default_graph.as_default():
#print(self.intermediateModel.predict(s))
p, v = self.model.predict(s)
#print(p)
#s_t, a_t, r_t, minimize = self.graph
#k = self.session.run(self.entropy, feed_dict={s_t: s})
#print(k)
return p, v
def getValue(self, s):
with self.default_graph.as_default():
p, v = self.model.predict(s)
return v
def getPolicy(self, s):
with self.default_graph.as_default():
p, v = self.model.predict(s)
return p
def pushTraining(self, action, reward, oldStep, newStep, threadId):
with self.lock:
act = np.zeros(NUM_ACTIONS)
act[action] = 1
self.buffer[0].append(act)
self.buffer[1].append(reward)
self.buffer[2].append(oldStep)
if newStep is None:
self.buffer[3].append(np.zeros(NUM_STATE))
self.buffer[4].append(0)
else:
self.buffer[3].append(newStep)
self.buffer[4].append(1)
def optimize(self):
if len(self.buffer[0]) > MIN_BATCH :
batch = []
with self.lock:
batch = self.buffer
self.buffer = [[], [], [], [], []]
#print(self.threadC)
s_t, a_t, r_t, minimize = self.graph
a = np.vstack(batch[0])
r = np.vstack(batch[1])
s = np.vstack(batch[2])
newStates = np.vstack(batch[3])
newStatesMask = np.vstack(batch[4])
newStatesValue = self.getValue(newStates)
rew = r + newStatesValue * GAMMA_N * newStatesMask
#x = np.hstack([s,a,r,newStatesValue,newStatesMask,rew])
#print(x)
#print(len(s))
#if len(s) > 5*MIN_BATCH: print("Optimizer alert! Minimizing batch of %d" % len(s))
#print("*************************************")
#print(s)
self.session.run(minimize, feed_dict={s_t: s, a_t: a, r_t: rew})
#for i in range(0,100):
# self.session.run(minimize, feed_dict={s_t: s, a_t: a, r_t: rew})
# k = self.session.run(self.loss, feed_dict={s_t: s, a_t: a, r_t: rew})
# print(k)
class Optimizer(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
def run(self):
while not stopSignal:
sleep(0.001)
brain.optimize()
class Actor(threading.Thread):
    """Trading agent thread for the A3C-style trainer.

    Each actor repeatedly simulates a trading episode on one of the eight
    market-data series (selected by ``id % 8``) and feeds
    (action, time, state, reward) transitions to the shared ``brain``.
    When ``isTest`` is true the actor runs a single greedy evaluation
    episode instead of looping until ``stopSignal`` is set.
    """

    def __init__(self, idt, isTest):
        threading.Thread.__init__(self)
        self.id = idt          # actor index; id % 8 selects the market series
        self.isTest = isTest   # True -> single greedy evaluation run
        self.steps = 0         # total environment steps taken by this actor
        self.simCount = 0      # number of completed simulations
        #print("Actor " + str(idt) +" created")

    def run(self):
        """Thread entry point: one episode in test mode, else loop until stopped."""
        #print("Actor " + str(self.id) +" started")
        if self.isTest:
            self.startSimulation()
        else:
            while not stopSignal:
                sleep(0)
                self.startSimulation()
                self.simCount += 1
        #print(str(self.id) + " : " + str(self.steps))

    def normalizeBin(self, value):
        """Encode ``value`` as a soft 8-bit binary vector.

        Each bit maps to 0.9 (set) or 0.1 (clear); values above 255
        saturate to all-0.9.  (Currently unused by the episode loop.)
        """
        #if value > 0:
        #    return [1]
        #else:
        #    return [-1]
        if value > 255:
            return [0.9] * 8
        r = value
        ret = [0.1] * 8
        i = 0
        while r > 0:
            x = (r % 2) * (0.8) + 0.1
            ret[i] = x
            r = int(r / 2)
            i += 1
        return ret

    def startSimulation(self):
        """Run one trading episode over a window of the market data."""
        # Pick the starting cash budget for this episode.
        if self.isTest:
            self.budget = 300
        elif self.simCount < 100:
            self.budget = random.randint(1000, 5000) * (self.id + 1)
        elif self.simCount < 200:
            self.budget = random.randint(9 * (100 - self.simCount) + 1000, 45 * (100 - self.simCount) + 5000) * (self.id + 1)
        else:
            self.budget = 300
            # NOTE(review): this unconditional assignment makes the 300 above
            # dead code — confirm which budget was actually intended.
            self.budget = random.randint(50000, 100000) * (self.id + 1)
        self.initbud = self.budget
        self.shares = 0
        self.mem = []  # episode transition buffer: (a, t, st, r)
        self.r = 0     # running n-step discounted reward accumulator
        t = NUM_HISTORY - 1
        self.timeMax = mdTimeMax[self.id % 8]  # random.randint(NUM_HISTORY , mdTimeMax[self.id % 8])
        if self.id == 12:
            # Actor 12 runs a scripted mean-reversion baseline on a sliding
            # 50-step window (see the hard-coded action override below).
            t = self.simCount % (mdTimeMax[self.id % 8] - NUM_HISTORY - 50) + NUM_HISTORY - 1
            self.timeMax = t + 50
        totalSteps = self.timeMax - t  # NOTE(review): computed but never used
        self.R = 0  # total (undiscounted) episode reward
        #print("t " + str(t) +" timemax " + str(mdTimeMax))
        actions = []
        self.badActions = 0  # count of infeasible buy/sell attempts
        kill = False
        while t < self.timeMax - 1 and not kill:
            #st = ([[self.budget] + [self.shares] + mdPrice[t + 1 - NUM_HISTORY: t + 1] + mdPriceMin[t + 1 - NUM_HISTORY: t + 1] +
            #mdPriceMax[t + 1 - NUM_HISTORY: t + 1] + mdBSRatio[t + 1 - NUM_HISTORY: t + 1] + mdVolume[t + 1 - NUM_HISTORY: t + 1]])
            # Feature flag: can the agent afford one share right now?
            if self.budget >= mdPrice[self.id % 8][t]:
                canBuy = [1]
            else:
                canBuy = [-1]
            # Mean/variance-normalised price history window for this market.
            priceA = mdPrice[self.id % 8][t + 1 - NUM_HISTORY: t + 1]
            priceA = [(x - mdMean[self.id % 8]) / mdVar[self.id % 8] for x in priceA]
            # State: [position value, canBuy, budget, market mean, price window]
            st = [[(self.shares * mdPrice[self.id % 8][t]) / mdVar[self.id % 8]] + canBuy + [(self.budget) / mdVar[self.id % 8], mdMean[self.id % 8] / mdVar[self.id % 8]] + priceA]
            state = np.array(st)
            #if self.isTest : print(state)
            a, v = self.getActionValue(state, t)
            if self.id == 12:
                # Hand-coded baseline: buy below the mean, sell above it.
                #if self.isTest: print("self.budget: " + str(self.budget) + " - p: " + str(mdPrice[self.id % 8 ][t]))
                if self.budget > mdPrice[self.id % 8][t] and mdPrice[self.id % 8][t] < mdMean[self.id % 8]:
                    a = 0
                elif self.shares > 0 and mdPrice[self.id % 8][t] > mdMean[self.id % 8]:
                    a = 1
                else:
                    a = 2
            if self.isTest and False:  # debug trace, disabled via `and False`
                print("time: " + str(t) + " | price: " + str(mdPrice[self.id][t]))
                print("budget: " + str(self.budget) + "| shares:" + str(self.shares))
                print("action: " + Action(a).name)
                testFile.write(str(mdPrice[self.id][t]) + " " + str(a) + "\n")
            r = self.act(state, t, a)
            self.R += r
            # print("reward: " + str(r))
            self.pushTraining(a, t, st, r)
            #if(t%3 == 0):
            # Throttle: wait while the optimizer drains the training buffer.
            while (len(brain.buffer[0]) > MIN_BATCH and not stopSignal):
                sleep(0)
            t += 1
            self.steps += 1
            actions.append(a)
            if (self.budget <= 0 and self.shares == 0):
                # Bankrupt with no inventory: terminate the episode early.
                kill = True
        ratioComplete = (t * 100) / (self.timeMax - 1)
        aHistory[self.id].append(actions)
        badActionPct = self.badActions
        print("Actor " + str(self.id) + " FINISH -- REWARD: " + str(self.R) + " -- Bad: " + str(badActionPct) + " -- SimC: " + str(self.simCount) + " -- Comp:%" + str(ratioComplete))

    def pushTraining(self, a, t, st, r):
        """Accumulate an n-step return and forward matured transitions to the brain.

        On the final step of the episode the whole buffer is flushed with no
        bootstrap state (terminal returns); otherwise a transition matures
        once ``N_STEP_RETURN`` steps have been collected.
        """
        # a debug
        self.mem.append((a, t, st, r))
        self.r = (self.r + GAMMA_N * r) / GAMMA
        #print("selfR: " + str(self.r))
        if t == self.timeMax - 2:
            while len(self.mem) > 0:
                brain.pushTraining(self.mem[0][0], self.r, self.mem[0][2], None, self.id)
                self.r = (self.r - self.mem[0][3]) / GAMMA
                self.mem.pop(0)
        elif len(self.mem) == N_STEP_RETURN:
            brain.pushTraining(self.mem[0][0], self.r, self.mem[0][2], st, self.id)
            self.r = self.r - self.mem[0][3]
            self.mem.pop(0)
        #print("NselfR: " + str(self.r))
        #print("\n\n")

    def getActionValue(self, state, time):
        """Return an epsilon-greedy action and the network's value estimate."""
        eps = self.getEps(self.steps)
        a, v = brain.getPrediction(state)
        #print("TIME: " + str(time) + " - EPS: " + str(eps) + " - VAL: " + str(v))
        if random.random() < eps:
            return random.randint(0, NUM_ACTIONS - 1), v
        else:
            return np.argmax(a), v

    def act(self, state, time, action):
        """Apply ``action`` at ``time`` and return the change in portfolio value.

        Infeasible actions (buying without cash, or selling without shares
        when shorting is disabled) incur a 3x-price penalty and are counted
        in ``badActions``.
        """
        action = Action(action)
        oldPortfolio = self.budget + self.shares * mdPrice[self.id % 8][time]
        oldBud = self.budget
        #print("oldportfolioValue: " + str(oldPortfolio))
        if action == Action.BUY and self.budget >= mdPrice[self.id % 8][time]:
            self.shares += 1
            self.budget -= mdPrice[self.id % 8][time]
        elif action == Action.BUY and self.budget < mdPrice[self.id % 8][time]:
            self.budget -= mdPrice[self.id % 8][time] * 3
            self.badActions += 1
        elif action == Action.SELL and (self.shares > 0 or CAN_SHORT):
            self.budget += mdPrice[self.id % 8][time] * self.shares
            self.shares = 0
        elif action == Action.SELL and (self.shares <= 0 and not CAN_SHORT):
            self.budget -= mdPrice[self.id % 8][time] * 3
            self.badActions += 1
        #elif action == Action.HOLD and self.shares == 0:
        #    self.budget -= mdPrice[self.id % 8][time] * 10
        #    self.badActions += 1
        # Reward = mark-to-market portfolio change against the next price.
        newPortfolio = self.budget + self.shares * mdPrice[self.id % 8][time + 1]
        #print("newportfolioValue: " + str(newPortfolio))
        #return newPortfolio - oldPortfolio
        #return (self.budget - oldBud)/(mdVar[self.id % 8])
        return newPortfolio - oldPortfolio

    def getEps(self, time):
        """Linearly annealed exploration rate; always 0.0 in test mode."""
        if self.isTest:
            return 0.0
        else:
            EPS_STEP = EPS_SLOPE * (self.timeMax - NUM_HISTORY)
            #print("EPS STEP : " + str(EPS_STEP))
            if time >= EPS_STEP:
                return EPS_END
            else:
                slope = (EPS_END - EPS_START) / (EPS_STEP)
                eps = slope * time + EPS_START
                return eps
def start():
    """Run one training session followed by an evaluation pass.

    Trains the shared brain for one hour with ``THREADS`` actor threads and
    one optimizer thread, then runs eight greedy test actors (one per
    market) and returns their final rewards as a space-joined string.
    """
    global brain, stopSignal

    stopSignal = False
    brain = Brain()

    # Launch the training actors.
    workers = [Actor(worker_id, False) for worker_id in range(THREADS)]
    for worker in workers:
        worker.start()

    # Launch the optimizer (critic) thread.
    optimizer = Optimizer()
    optimizer.start()

    # Train for one hour, then signal every thread to stop.
    sleep(3600)
    stopSignal = True
    sleep(5)
    #mdTimeMax = 2 * mdTimeMax
    #Test Strategy
    #print("**TRAINING COMPLETE*********")

    # Evaluation pass: one greedy tester per market series.
    testers = [Actor(tester_id, True) for tester_id in range(8)]
    results = [0] * 8
    for tester in testers:
        tester.start()
    for tester in testers:
        tester.join()
        results[tester.id] = str(int(tester.R))
    return " ".join(results)
#for phase in np.linspace(0, 10*np.pi, 500):
# line1.set_ydata(np.sin(x + phase))
# t = 0
# while t < 1000:
# fig.canvas.flush_events()
# sleep(0.001)
# t+=1
| en | 0.343227 | .d8888b. 888 d88888888888
d88P Y88b 888 d88888 888
888 888 888 d88P888 888
888 888d888888 88888888b. 888888 .d88b. d88P 888 888
888 888P" 888 888888 "88b888 d88""88b d88P 888 888
888 888888 888 888888 888888 888 888 d88P 888 888
Y88b d88P888 Y88b 888888 d88PY88b. Y88..88P d8888888888 888
"Y8888P" 888 "Y8888888888P" "Y888 "Y88P" d88P 8888888888
888888 d88P
Y8b d88P888 d88P
"Y88P" 888 d88P by UmeW
****** Deep AC3 Trader ****** # HyperParams # v loss coefficient # entropy coefficient # Scrapped data + (Shares bought?) + (Budget?) # Buy = 0 , Sell = 1 , Hold = 2 # States Var # we should test if everything good at import #>>> t = "2017-12-08 23:22:00 16066.530120481928 16060 16072 38 225691" #['2017-12-08', '23:22:00', '16066.530120481928', '16060', '16072', '38', '225691'] #print(mdVar[j]) #self.default_graph.finalize() #l_predense = Dense(NUM_DENSE, activation='relu', kernel_regularizer=regularizers.l2(0.01))(l_input) #l_dense = Dense(NUM_DENSE, activation='relu', kernel_regularizer=regularizers.l2(0.01))(l_predense) # r + gamma vs' #loss_total = tf.reduce_mean(entropy) #print(self.intermediateModel.predict(s)) #print(p) #s_t, a_t, r_t, minimize = self.graph #k = self.session.run(self.entropy, feed_dict={s_t: s}) #print(k) #print(self.threadC) #x = np.hstack([s,a,r,newStatesValue,newStatesMask,rew]) #print(x) #print(len(s)) #if len(s) > 5*MIN_BATCH: print("Optimizer alert! Minimizing batch of %d" % len(s)) #print("*************************************") #print(s) #for i in range(0,100): # self.session.run(minimize, feed_dict={s_t: s, a_t: a, r_t: rew}) # k = self.session.run(self.loss, feed_dict={s_t: s, a_t: a, r_t: rew}) # print(k) #print("Actor " + str(idt) +" created") #print("Actor " + str(self.id) +" started") #print(str(self.id) + " : " + str(self.steps)) #if value > 0: # return [1] #else: # return [-1] # (a, t, st, r) #random.randint(NUM_HISTORY , mdTimeMax[self.id % 8]) #print("t " + str(t) +" timemax " + str(mdTimeMax)) #st = ([[self.budget] + [self.shares] + mdPrice[t + 1 - NUM_HISTORY: t + 1] + mdPriceMin[t + 1 - NUM_HISTORY: t + 1] + #mdPriceMax[t + 1 - NUM_HISTORY: t + 1] + mdBSRatio[t + 1 - NUM_HISTORY: t + 1] + mdVolume[t + 1 - NUM_HISTORY: t + 1]]) #if self.isTest : print(state) #if self.isTest: print("self.budget: " + str(self.budget) + " - p: " + str(mdPrice[self.id % 8 ][t])) # print("reward: " + str(r)) #if(t%3 == 0): # a debug 
#print("selfR: " + str(self.r)) #print("NselfR: " + str(self.r)) #print("\n\n") #print("TIME: " + str(time) + " - EPS: " + str(eps) + " - VAL: " + str(v)) #print("oldportfolioValue: " + str(oldPortfolio)) #elif action == Action.HOLD and self.shares == 0: # self.budget -= mdPrice[self.id % 8][time] * 10 # self.badActions += 1 #print("newportfolioValue: " + str(newPortfolio)) #return newPortfolio - oldPortfolio #return (self.budget - oldBud)/(mdVar[self.id % 8]) #print("EPS STEP : " + str(EPS_STEP)) # Start Actors # Start Critics #mdTimeMax = 2 * mdTimeMax #Test Strategy #print("**TRAINING COMPLETE*********") #for phase in np.linspace(0, 10*np.pi, 500): # line1.set_ydata(np.sin(x + phase)) # t = 0 # while t < 1000: # fig.canvas.flush_events() # sleep(0.001) # t+=1 | 2.281875 | 2 |
stograde/process_file/file_result.py | babatana/stograde | 0 | 6619141 | <gh_stars>0
from dataclasses import dataclass, field
from typing import List, TYPE_CHECKING
if TYPE_CHECKING:
from ..process_file.compile_result import CompileResult
from ..process_file.test_result import TestResult
@dataclass
class FileResult:
    """The results from compiling and testing an assignment file.

    One instance is collected per submitted file; ``compile_results`` and
    ``test_results`` hold one entry per compile/test invocation.
    """
    file_name: str  # Name of the file
    contents: str = ''  # Contents of the file
    compile_results: List['CompileResult'] = field(default_factory=list)  # Results of each compilation
    test_results: List['TestResult'] = field(default_factory=list)  # Results of each test
    file_missing: bool = False  # Is the file missing
    last_modified: str = ''  # Last modification date according to git
    other_files: List[str] = field(default_factory=list)  # Other files in the directory (used if file is missing)
    optional: bool = False  # Is the file not required to exist
    compile_optional: bool = False  # Is the file not required to compile
| from dataclasses import dataclass, field
from typing import List, TYPE_CHECKING
if TYPE_CHECKING:
from ..process_file.compile_result import CompileResult
from ..process_file.test_result import TestResult
@dataclass
class FileResult:
"""The results from compiling and testing an assignment file"""
file_name: str # Name of the file
contents: str = '' # Contents of the file
compile_results: List['CompileResult'] = field(default_factory=list) # Results of each compilation
test_results: List['TestResult'] = field(default_factory=list) # Results of each test
file_missing: bool = False # Is the file missing
last_modified: str = '' # Last modification date according to git
other_files: List[str] = field(default_factory=list) # Other files in the directory (used if file is missing)
optional: bool = False # Is the file not required to exist
compile_optional: bool = False # Is the file not required to compile | en | 0.888762 | The results from compiling and testing an assignment file # Name of the file # Contents of the file # Results of each compilation # Results of each test # Is the file missing # Last modification date according to git # Other files in the directory (used if file is missing) # Is the file not required to exist # Is the file not required to compile | 2.876775 | 3 |
core/IELBO.py | rist-ro/argo | 4 | 6619142 | import tensorflow as tf
from tensorflow_probability import distributions as tfd
from argo.core.utils.argo_utils import NUMTOL
from .ELBO import ELBO
class IELBO(ELBO):
    """Integral ELBO cost function.

    Variant of the ELBO whose reconstruction term uses the probability mass
    of the interval [x - h, x + h] under the visible distribution (via its
    CDF) instead of the point log-density.  Only valid for continuous
    visible distributions (asserted in ``_build``).
    """

    def __init__(self, beta=1.0, warm_up_method=None, h=0.01, normalize=1, name="IELBO"):
        """
        Args:
            beta: weight of the KL term (handled by the ELBO base class).
            warm_up_method: warm-up schedule for beta (handled by ELBO).
            h: half-width of the integration interval.
            normalize: if 1, divide the interval mass by its length 2*h,
                turning it into an average density.
            name: module name.
        """
        super().__init__(beta=beta, warm_up_method=warm_up_method, name=name)
        self._h = h
        self._normalize = normalize

    def create_id(self, cost_fuction_kwargs):
        """Build a unique string id encoding beta, h, normalize and warm-up."""
        _id = "IELBO_b" + str(cost_fuction_kwargs["beta"]) + "_h" + str(cost_fuction_kwargs["h"]) + "_n" + str(cost_fuction_kwargs["normalize"])
        _id += self.create_warm_up_id(cost_fuction_kwargs)
        return _id

    def _build(self, vae):
        """Create placeholders for h/normalize, then delegate to the ELBO build."""
        self.h = tf.placeholder_with_default(self._h, shape=(), name='half_intergral_interval')
        self.normalize = tf.placeholder_with_default(self._normalize, shape=(), name='normalize_integral')
        assert (not isinstance(vae._model_visible, tfd.Bernoulli)), "Cannot use the IELBO with discrete distributions for the visible variables"
        return super()._build(vae)

    def reconstruction_loss(self, x_target, n_z_samples, model_visible):
        """Negative log of the interval mass CDF(x+h) - CDF(x-h) per pixel.

        The target is tiled across the n_z_samples latent samples; per-pixel
        losses are summed, then averaged over samples and batch (both
        stacked on axis 0).
        """
        # with tf.variable_scope('ELBO/reconstruction_loss'):
        # no need for ELBO, sonnet module is already adding that, the line above would produce:
        # ELBO/ELBO/reconstruction_loss/node_created
        with tf.variable_scope('reconstruction_loss'):
            # 1) the log_pdf is computed with respect to distribution of the visible
            # variables obtained from the target of input of the graph (self.x_target)
            # can I avoid replicate? maybe not..
            input_shape = x_target.shape.as_list()[1:]
            ones = [1] * len(input_shape)
            x_replicate = tf.tile(x_target, [n_z_samples] + ones)
            # no need to check for the values in the interval, since the cdf is defined over R
            upper = model_visible.cdf(x_replicate + self.h)
            lower = model_visible.cdf(x_replicate - self.h)
            delta = tf.cond(tf.equal(self.normalize, 1),
                            lambda: 2.0 * self.h + NUMTOL,
                            lambda: 1.0)
            reconstr_loss = -tf.log((upper - lower) / delta + NUMTOL)
            #now (ready for arbitrary intermediate samplings)
            all_axis_but_first = list(range(len(reconstr_loss.shape)))[1:]
            #independent p for each input pixel
            log_p = tf.reduce_sum(reconstr_loss, axis=all_axis_but_first)
            #average over all the samples and the batch (they are both stacked on the axis 0)
            mean_reconstr_loss = tf.reduce_mean(log_p, axis=0, name="reconstruction_loss")
            # self.log_p_x_z = mean_reconstr_loss
            return mean_reconstr_loss
| import tensorflow as tf
from tensorflow_probability import distributions as tfd
from argo.core.utils.argo_utils import NUMTOL
from .ELBO import ELBO
class IELBO(ELBO):
    """Integral ELBO cost function.

    Variant of the ELBO whose reconstruction term uses the probability mass
    of the interval [x - h, x + h] under the visible distribution (via its
    CDF) instead of the point log-density.  Only valid for continuous
    visible distributions (asserted in ``_build``).
    """

    def __init__(self, beta=1.0, warm_up_method=None, h=0.01, normalize=1, name="IELBO"):
        """
        Args:
            beta: weight of the KL term (handled by the ELBO base class).
            warm_up_method: warm-up schedule for beta (handled by ELBO).
            h: half-width of the integration interval.
            normalize: if 1, divide the interval mass by its length 2*h,
                turning it into an average density.
            name: module name.
        """
        super().__init__(beta=beta, warm_up_method=warm_up_method, name=name)
        self._h = h
        self._normalize = normalize

    def create_id(self, cost_fuction_kwargs):
        """Build a unique string id encoding beta, h, normalize and warm-up."""
        _id = "IELBO_b" + str(cost_fuction_kwargs["beta"]) + "_h" + str(cost_fuction_kwargs["h"]) + "_n" + str(cost_fuction_kwargs["normalize"])
        _id += self.create_warm_up_id(cost_fuction_kwargs)
        return _id

    def _build(self, vae):
        """Create placeholders for h/normalize, then delegate to the ELBO build."""
        self.h = tf.placeholder_with_default(self._h, shape=(), name='half_intergral_interval')
        self.normalize = tf.placeholder_with_default(self._normalize, shape=(), name='normalize_integral')
        assert (not isinstance(vae._model_visible, tfd.Bernoulli)), "Cannot use the IELBO with discrete distributions for the visible variables"
        return super()._build(vae)

    def reconstruction_loss(self, x_target, n_z_samples, model_visible):
        """Negative log of the interval mass CDF(x+h) - CDF(x-h) per pixel.

        The target is tiled across the n_z_samples latent samples; per-pixel
        losses are summed, then averaged over samples and batch (both
        stacked on axis 0).
        """
        # with tf.variable_scope('ELBO/reconstruction_loss'):
        # no need for ELBO, sonnet module is already adding that, the line above would produce:
        # ELBO/ELBO/reconstruction_loss/node_created
        with tf.variable_scope('reconstruction_loss'):
            # 1) the log_pdf is computed with respect to distribution of the visible
            # variables obtained from the target of input of the graph (self.x_target)
            # can I avoid replicate? maybe not..
            input_shape = x_target.shape.as_list()[1:]
            ones = [1] * len(input_shape)
            x_replicate = tf.tile(x_target, [n_z_samples] + ones)
            # no need to check for the values in the interval, since the cdf is defined over R
            upper = model_visible.cdf(x_replicate + self.h)
            lower = model_visible.cdf(x_replicate - self.h)
            delta = tf.cond(tf.equal(self.normalize, 1),
                            lambda: 2.0 * self.h + NUMTOL,
                            lambda: 1.0)
            reconstr_loss = -tf.log((upper - lower) / delta + NUMTOL)
            #now (ready for arbitrary intermediate samplings)
            all_axis_but_first = list(range(len(reconstr_loss.shape)))[1:]
            #independent p for each input pixel
            log_p = tf.reduce_sum(reconstr_loss, axis=all_axis_but_first)
            #average over all the samples and the batch (they are both stacked on the axis 0)
            mean_reconstr_loss = tf.reduce_mean(log_p, axis=0, name="reconstruction_loss")
            # self.log_p_x_z = mean_reconstr_loss
            return mean_reconstr_loss
| en | 0.768047 | # with tf.variable_scope('ELBO/reconstruction_loss'): # no need for ELBO, sonnet module is already adding that, the line above would produce: # ELBO/ELBO/reconstruction_loss/node_created # 1) the log_pdf is computed with respect to distribution of the visible # variables obtained from the target of input of the graph (self.x_target) # can I avoid replicate? maybe not.. # no need to check for the values in the interval, since the cdf is defined over R # #before # reconstr_loss = tf.reshape(reconstr_loss, [n_z_samples, -1]+input_shape) # all_axis_but_first_2 = list(range(len(reconstr_loss.shape)))[2:] # #independent p for each input pixel # log_p = tf.reduce_sum(reconstr_loss, axis=all_axis_but_first_2) # #average over the samples # mean_reconstr_loss = tf.reduce_mean(log_p, axis=0) #now (ready for arbitrary intermediate samplings) #independent p for each input pixel #average over all the samples and the batch (they are both stacked on the axis 0) # self.log_p_x_z = mean_reconstr_loss | 2.228615 | 2 |
app/api/v1/__init__.py | jslay88/flask_boilerplate | 6 | 6619143 | <filename>app/api/v1/__init__.py
from flask import Blueprint
from flask_restx import Api, apidoc
from flask_login import login_required
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.exc import IntegrityError
# Namespace import
from .user.endpoints import ns as user_namespace
from .token.endpoints import ns as token_namespace
from .log.endpoints import ns as log_namespace
# Wire up the v1 API: a blueprint mounted at /api/v1 carrying the REST namespaces.
api_blueprint = Blueprint('api_v1', __name__, url_prefix='/api/v1')
api = Api(api_blueprint, title='Flask Boilerplate API', version='1.0.0')
api.add_namespace(user_namespace)
api.add_namespace(token_namespace)
api.add_namespace(log_namespace)
@api.errorhandler(NoResultFound)
def handle_no_result_exception(error):
    """Translate an SQLAlchemy NoResultFound lookup failure into an HTTP 404."""
    payload = {'message': str(error)}
    return payload, 404
@api.errorhandler(IntegrityError)
def handle_integrity_error(error):
    """Translate a database integrity violation (likely a key constraint) into HTTP 400."""
    payload = {'message': str(error)}
    return payload, 400
@api.documentation
@login_required
def swagger_ui():
    """Render the Swagger documentation UI.

    API docs are only available for logged in users; ``login_required``
    enforces the authentication check before rendering.

    :return: the rendered Swagger UI page for this API.
    """
    return apidoc.ui_for(api)
| <filename>app/api/v1/__init__.py
from flask import Blueprint
from flask_restx import Api, apidoc
from flask_login import login_required
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.exc import IntegrityError
# Namespace import
from .user.endpoints import ns as user_namespace
from .token.endpoints import ns as token_namespace
from .log.endpoints import ns as log_namespace
# Wire up the v1 API: a blueprint mounted at /api/v1 carrying the REST namespaces.
api_blueprint = Blueprint('api_v1', __name__, url_prefix='/api/v1')
api = Api(api_blueprint, title='Flask Boilerplate API', version='1.0.0')
api.add_namespace(user_namespace)
api.add_namespace(token_namespace)
api.add_namespace(log_namespace)
@api.errorhandler(NoResultFound)
def handle_no_result_exception(error):
    """Translate an SQLAlchemy NoResultFound lookup failure into an HTTP 404."""
    payload = {'message': str(error)}
    return payload, 404
@api.errorhandler(IntegrityError)
def handle_integrity_error(error):
    """Translate a database integrity violation (likely a key constraint) into HTTP 400."""
    payload = {'message': str(error)}
    return payload, 400
@api.documentation
@login_required
def swagger_ui():
    """Render the Swagger documentation UI.

    API docs are only available for logged in users; ``login_required``
    enforces the authentication check before rendering.

    :return: the rendered Swagger UI page for this API.
    """
    return apidoc.ui_for(api)
| en | 0.4817 | # Namespace import Return a custom not found error message and 404 status code Likely a Key Restraint Error. API Docs are only available for logged in users. :return: | 2.411711 | 2 |
src/aihwkit/inference/converter/base.py | todd-deshane/aihwkit | 133 | 6619144 | <gh_stars>100-1000
# -*- coding: utf-8 -*-
# (C) Copyright 2020, 2021 IBM. All Rights Reserved.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Base conductance converter for the phenomenological noise models for inference."""
from typing import Dict, List, Tuple
from torch import Tensor
from torch.autograd import no_grad
class BaseConductanceConverter:
    """Base class for converting DNN weights into conductances.

    Subclasses must implement both directions of the conversion; the pair
    is expected to round-trip (weights -> conductances -> weights).
    """

    @no_grad()
    def convert_to_conductances(self, weights: Tensor) -> Tuple[List[Tensor], Dict]:
        """Convert a weight matrix into conductances.

        Caution:
            The conversion is assumed deterministic and repeatable.

        Args:
            weights: weight matrix tensor.

        Returns:
            Tuple of the list of conductance tensors and a params
            dictionary that is used for the reverse conversion.
        """
        # Must be overridden by concrete converter subclasses.
        raise NotImplementedError

    @no_grad()
    def convert_back_to_weights(self, conductances: List[Tensor], params: Dict) -> Tensor:
        """Convert a matrix of conductances into weights.

        Caution:
            The conversion is assumed deterministic and repeatable.

        Args:
            conductances: list of conductance tensors representing a weight matrix
            params: param dictionary that was returned from the conversion

        Returns:
            weight matrix
        """
        # Must be overridden by concrete converter subclasses.
        raise NotImplementedError
| # -*- coding: utf-8 -*-
# (C) Copyright 2020, 2021 IBM. All Rights Reserved.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Base conductance converter for the phenomenological noise models for inference."""
from typing import Dict, List, Tuple
from torch import Tensor
from torch.autograd import no_grad
class BaseConductanceConverter:
"""Base class for converting DNN weights into conductances."""
@no_grad()
def convert_to_conductances(self, weights: Tensor) -> Tuple[List[Tensor], Dict]:
"""Convert a weight matrix into conductances.
Caution:
The conversion is assumed deterministic and repeatable.
Args:
weights: weight matrix tensor.
Returns:
Tuple of the list of conductance tensors and a params
dictionary that is used for the reverse conversion.
"""
raise NotImplementedError
@no_grad()
def convert_back_to_weights(self, conductances: List[Tensor], params: Dict) -> Tensor:
"""Convert a matrix of conductances into weights.
Caution:
The conversion is assumed deterministic and repeatable.
Args:
conductances: list of conductance tensors representing a weight matrix
params: param dictionary that was returned from the conversion
Returns:
weight matrix
"""
raise NotImplementedError | en | 0.876367 | # -*- coding: utf-8 -*- # (C) Copyright 2020, 2021 IBM. All Rights Reserved. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. Base conductance converter for the phenomenological noise models for inference. Base class for converting DNN weights into conductances. Convert a weight matrix into conductances. Caution: The conversion is assumed deterministic and repeatable. Args: weights: weight matrix tensor. Returns: Tuple of the list of conductance tensors and a params dictionary that is used for the reverse conversion. Convert a matrix of conductances into weights. Caution: The conversion is assumed deterministic and repeatable. Args: conductances: list of conductance tensors representing a weight matrix params: param dictionary that was returned from the conversion Returns: weight matrix | 2.053103 | 2 |
codes/wsgi_bootstrap.py | dokuboyejo/python-flask-uwsgi-heroku | 0 | 6619145 | <reponame>dokuboyejo/python-flask-uwsgi-heroku
from flask import Flask, jsonify, request
app = Flask(__name__)
@app.route('/', methods=['GET'], strict_slashes=False)
def hello_world():
    """Root endpoint: return a JSON greeting with an explicit 200 status."""
    return jsonify({'info': 'Hello, World!'}), 200
if __name__ == '__main__':
app.run() | from flask import Flask, jsonify, request
app = Flask(__name__)
@app.route('/', methods=['GET'], strict_slashes=False)
def hello_world():
    """Root endpoint: return a JSON greeting with an explicit 200 status."""
    return jsonify({'info': 'Hello, World!'}), 200
if __name__ == '__main__':
app.run() | none | 1 | 2.624039 | 3 | |
tests/compat.py | kibitzr/kibitzr-email | 2 | 6619146 | <filename>tests/compat.py
try:
from unittest import mock
except ImportError:
import mock
__all__ = (
'mock',
)
| <filename>tests/compat.py
try:
from unittest import mock
except ImportError:
import mock
__all__ = (
'mock',
)
| none | 1 | 1.423697 | 1 | |
python/misc/download.py | francois-rozet/adopptrs | 11 | 6619147 | # Imports
# Imports
import argparse  # fix: argparse is used below but was never imported
import os
import sys

import via as VIA
from PIL import Image

from walonmap import _WALONMAP as wm
from summarize import parse

# Arguments
parser = argparse.ArgumentParser(description='Download images from WalOnMap')
parser.add_argument('-d', '--destination', default='../products/liege/', help='destination of the tiles')
parser.add_argument('-i', '--input', default='../resources/walonmap/via_liege_city.json', help='input VIA file')
args = parser.parse_args()

# Destination: make sure the output directory exists
os.makedirs(args.destination, exist_ok=True)

# Download every annotated tile and save it locally
via = VIA.load(args.input)

for imagename in via:
    row, col = parse(imagename)  # tile (row, col) coordinates parsed from the file name
    img = Image.open(wm.get_tile(row, col))
    img.save(os.path.join(args.destination, imagename))
| # Imports
# Imports
import argparse  # fix: argparse is used below but was never imported
import os
import sys

import via as VIA
from PIL import Image

from walonmap import _WALONMAP as wm
from summarize import parse

# Arguments
parser = argparse.ArgumentParser(description='Download images from WalOnMap')
parser.add_argument('-d', '--destination', default='../products/liege/', help='destination of the tiles')
parser.add_argument('-i', '--input', default='../resources/walonmap/via_liege_city.json', help='input VIA file')
args = parser.parse_args()

# Destination: make sure the output directory exists
os.makedirs(args.destination, exist_ok=True)

# Download every annotated tile and save it locally
via = VIA.load(args.input)

for imagename in via:
    row, col = parse(imagename)  # tile (row, col) coordinates parsed from the file name
    img = Image.open(wm.get_tile(row, col))
    img.save(os.path.join(args.destination, imagename))
| en | 0.353678 | # Imports # Arguments # Destination # Download | 3.065235 | 3 |
ex010_2.py | BrunosVieira88/Python | 0 | 6619148 | reais = float(input("Digite o valor em reais "))
dolar = reais / 5.75
print("O valor digitado em reais de {:.2f} é equivalente em dolars a {:.2f} ".format(reais,dolar)) | reais = float(input("Digite o valor em reais "))
dolar = reais / 5.75
print("O valor digitado em reais de {:.2f} é equivalente em dolars a {:.2f} ".format(reais,dolar)) | none | 1 | 3.739206 | 4 | |
mcts.py | ZhenghaoFei/mcts_maze | 0 | 6619149 | import numpy as np
import copy
from mcts_utils import StateNode, StateActionNode, print_tree
class Mcts(object):
    """
    Monte Carlo Tree Search planning.

    Normal (non-widened) version for a finite action space and a finite
    state space.

    Collaborators:
        env               -- exposes ``ACTION_DIM`` and ``load_states(state)``
        default_policy_fn -- rollout policy: ``get_action(state) -> action``
        model_fn          -- generative model: ``step(state, action) ->
                             (state_nxt, reward, done)``
    """

    def __init__(self, env, exploration_parameter, default_policy_fn, model_fn):
        self.cp = exploration_parameter  # UCT exploration constant
        self.default_policy_fn = default_policy_fn
        self.env = env
        self.model_fn = model_fn
        self.select_action = self.select_action_uct  # selection rule used by grow_tree

    def run(self, st, rollout_times, debug=False):
        """Plan from state ``st``.

        Grows the search tree ``rollout_times`` times, then returns the
        greedy root action and the root's value estimate.  The environment
        is restored to the root state before returning.
        """
        root_node = StateNode(st, parent=None, depth=0, reward=0, done=False)
        # grow the tree for N times
        for t in range(rollout_times):
            self.grow_tree(root_node)
        action = self.best_action(root_node)
        if debug:
            print_tree(root_node)
        self.env.load_states(root_node.state)
        root_value = root_node.value()
        return action, root_value

    def best_action(self, node):
        """Return the child action with the highest mean value.

        Falls back to the default policy when ``node`` has no children.
        """
        if node.num_children() == 0:
            print("no child in root node")
            action = self.default_policy_fn.get_action(node.state)
        else:
            qs = []
            acs = []
            for child in node.children:
                q = child.value()
                qs.append(q)
                acs.append(child.action)
            qs = np.asarray(qs)
            best_q_idx = np.argmax(qs)
            action = acs[best_q_idx]
        return action

    def select_action_random(self, state_node):
        """Uniform-random action; alternative selection rule (unused by default)."""
        action = np.random.randint(self.env.ACTION_DIM)
        return action

    def select_action_uct(self, state_node):
        """Pick the action maximising the UCT score.

        Untried actions score +inf and are therefore selected first.
        """
        best_action = self.default_policy_fn.get_action(state_node.state)
        best_q = -np.inf
        for action in range(self.env.ACTION_DIM):
            sa_node, exist = state_node.find_child(action)
            if not exist:
                value = np.inf
            else:
                value = sa_node.value() + self.cp * np.sqrt(np.log(state_node.visited_times) / sa_node.visited_times)
            if value > best_q:
                best_action = action
                best_q = value
        return best_action

    def aggregate_sa_node(self, state_node, action):
        """Return the (state, action) child for ``action``, creating it if absent."""
        new_sa_node, exist = state_node.find_child(action)
        if not exist:
            new_sa_node = StateActionNode(state_node.state, action, parent=state_node, depth=state_node.depth + 1)
            state_node.append_child(new_sa_node)
        return new_sa_node

    def expansion(self, leaf_state_node):
        """Expand a leaf with one default-policy action.

        Adds the (s, a) node and the sampled successor state node, and
        returns (new state node, done flag).
        """
        action = self.default_policy_fn.get_action(leaf_state_node.state)
        new_sa_node = StateActionNode(leaf_state_node.state, action, parent=leaf_state_node, depth=leaf_state_node.depth + 1)
        leaf_state_node.append_child(new_sa_node)
        state_nxt, reward, done = self.model_fn.step(leaf_state_node.state, action)
        # fix: depth was "depth + 1." (a float literal typo); keep depth an int
        new_s_node = StateNode(state_nxt, parent=new_sa_node, depth=new_sa_node.depth + 1, reward=reward, done=done)
        new_sa_node.append_child(new_s_node)
        new_sa_node.reward += reward
        return new_s_node, done

    def select_outcome(self, sa_node):
        """Sample an outcome state for ``sa_node`` from the model (normal version).

        Adds the decision node if it was not seen before; returns
        (decision node, done flag).
        """
        state_nxt, reward, done = self.model_fn.step(sa_node.state, sa_node.action)
        decision_node, exist = sa_node.find_child(state_nxt)
        if not exist:
            decision_node = StateNode(state_nxt, parent=sa_node, depth=sa_node.depth + 1, reward=reward, done=done)
            sa_node.append_child(decision_node)
        sa_node.reward += decision_node.reward
        return decision_node, done

    def back_propogation(self, current_s_node, cumulative_reward):
        """Backward phase: propagate the rollout return up to the root.

        Updates visit counts and cumulative rewards on every node along the
        path.  (Method name kept as-is for API compatibility.)
        """
        while True:
            current_s_node.visited_times += 1
            current_s_node.cumulative_reward += cumulative_reward
            if current_s_node.parent is None:
                break
            current_sa_node = current_s_node.parent
            current_sa_node.visited_times += 1
            cumulative_reward += current_sa_node.reward_mean()
            current_sa_node.cumulative_reward += cumulative_reward
            current_s_node = current_sa_node.parent

    def grow_tree(self, root_node):
        """One MCTS iteration: selection, expansion, evaluation, backup."""
        current_s_node = root_node
        # forward phase: descend until an unvisited or childless node is reached
        while True:
            # select an action, adding the (s, a) node to the tree if new
            action = self.select_action(current_s_node)
            new_sa_node = self.aggregate_sa_node(current_s_node, action)
            # let the model generate the next state, adding its (s) node
            current_s_node, done = self.select_outcome(new_sa_node)
            if current_s_node.visited_times == 0 or current_s_node.num_children() == 0:
                if not done:
                    current_s_node, done = self.expansion(current_s_node)
                break
        if not done:
            cumulative_reward = self.eval(current_s_node)
        else:
            cumulative_reward = 0.
        self.back_propogation(current_s_node, cumulative_reward)

    def eval(self, current_s_node, max_horizon=10):
        """Monte-Carlo rollout with the default policy, truncated at
        ``max_horizon`` steps; returns the accumulated (undiscounted) reward.
        """
        horizon = 0
        cumulative_reward = 0
        state = current_s_node.state
        while True:
            horizon += 1
            action = self.default_policy_fn.get_action(state)
            state_nxt, reward, done = self.model_fn.step(state, action)
            cumulative_reward += reward
            # bug fix: advance the rollout state; previously state_nxt was
            # discarded, so every "rollout" step restarted from the same state
            state = state_nxt
            if done or horizon > max_horizon:
                break
        return cumulative_reward
class MctsSwp(Mcts):
    """
    Monte Carlo Tree Search Planning Method
    Single Progressive Widening
    For infinite action space finite state space
    """

    def __init__(self, env, exploration_parameter, alpha, default_policy_fn, model_fn):
        # alpha: progressive-widening exponent; a state node with n visits
        # may hold at most n**alpha distinct action children.
        self.cp = exploration_parameter
        self.alpha = alpha
        self.default_policy_fn = default_policy_fn
        self.env = env
        self.model_fn = model_fn
        self.select_action = self.select_action_swp

    def select_action_swp(self, state_node):
        """Single progressive widening over actions.

        While the widening budget (visited_times ** alpha) allows, sample a
        new action from the default policy (adding its node if unseen);
        otherwise fall back to UCT over the existing children.
        """
        if (state_node.visited_times)**self.alpha > state_node.num_children():
            action = self.default_policy_fn.get_action(state_node.state)
            new_sa_node, exist = state_node.find_child(action)
            if not exist:
                new_sa_node = StateActionNode(state_node.state, action, parent=state_node, depth=state_node.depth+1)
                state_node.append_child(new_sa_node)
        else:
            action = self.select_action_uct(state_node)
        return action
class MctsDwp(MctsSwp):
    """Monte Carlo Tree Search with Double Progressive Widening.

    Widens both the action side (inherited single progressive widening)
    and the outcome side: a state-action node only samples a new outcome
    from the model while visits**beta allows it; otherwise an existing
    outcome is revisited, sampled proportionally to its visit count.
    """

    def __init__(self, env, exploration_parameter, alpha, beta, default_policy_fn, model_fn):
        self.cp = exploration_parameter
        self.alpha = alpha            # action-side widening exponent
        self.beta = beta              # outcome-side widening exponent
        self.default_policy_fn = default_policy_fn
        self.env = env
        self.model_fn = model_fn
        self.select_action = self.select_action_swp

    def choose_decision_node(self, sa_node):
        """Sample an existing child state node, weighted by visit counts."""
        logits = []
        decision_nodes = []
        for child in sa_node.children:
            logits.append(child.visited_times)
            decision_nodes.append(child)
        # Bug fix: np.float was deprecated in NumPy 1.20 and removed in
        # 1.24; the builtin float is the documented replacement.
        logits = np.asarray(logits, dtype=float)
        total = np.sum(logits)
        if total > 0:
            probs = logits / total  # normalize visit counts into a distribution
        else:
            # All children unvisited: choose uniformly instead of dividing
            # by zero (the original would have produced NaN probabilities).
            probs = np.full(len(logits), 1.0 / len(logits))
        node_idx = np.random.choice(sa_node.num_children(), p=probs)
        decision_node = decision_nodes[node_idx]
        return decision_node

    def select_outcome(self, sa_node):
        """Double-progressive-widening outcome selection.

        While visits**beta >= number of sampled outcomes, query the model
        for a (possibly new) next state; otherwise revisit a previously
        sampled outcome.
        """
        if (sa_node.visited_times)**self.beta >= sa_node.num_children():
            state_nxt, reward, done = self.model_fn.step(sa_node.state, sa_node.action)
            decision_node, exist = sa_node.find_child(state_nxt)
            if not exist:
                decision_node = StateNode(state_nxt, parent=sa_node,
                                          depth=sa_node.depth + 1,
                                          reward=reward, done=done)
                sa_node.append_child(decision_node)
        else:
            decision_node = self.choose_decision_node(sa_node)
        done = decision_node.done
        # accumulate the immediate reward on the edge; reward_mean()
        # averages it over visits during back-propagation
        sa_node.reward += decision_node.reward
        return decision_node, done
| import numpy as np
import copy
from mcts_utils import StateNode, StateActionNode, print_tree
class Mcts(object):
    """Monte Carlo Tree Search planner (vanilla UCT).

    Intended for finite action and finite state spaces.  The tree
    alternates state nodes (StateNode) and state-action nodes
    (StateActionNode); ``model_fn.step`` supplies transitions and
    ``default_policy_fn`` supplies rollout/fallback actions.
    """

    def __init__(self, env, exploration_parameter, default_policy_fn, model_fn):
        self.cp = exploration_parameter  # UCT exploration constant
        self.default_policy_fn = default_policy_fn
        self.env = env
        self.model_fn = model_fn
        self.select_action = self.select_action_uct

    def run(self, st, rollout_times, debug=False):
        """Plan from state ``st`` by growing the tree ``rollout_times`` times.

        Returns:
            (action, root_value): the greedy root action and the root
            node's value estimate.
        """
        root_node = StateNode(st, parent=None, depth=0, reward=0, done=False)
        # grow the tree for N iterations
        for t in range(rollout_times):
            self.grow_tree(root_node)
        action = self.best_action(root_node)
        if debug:
            print_tree(root_node)
        # restore the environment to the root state, since planning used
        # the model to simulate ahead of it
        self.env.load_states(root_node.state)
        root_value = root_node.value()
        return action, root_value

    def best_action(self, node):
        """Return the child action with the highest value estimate.

        Falls back to the default policy when the root has no children.
        """
        if node.num_children() == 0:
            print("no child in root node")
            action = self.default_policy_fn.get_action(node.state)
        else:
            qs = []
            acs = []
            for child in node.children:
                q = child.value()
                qs.append(q)
                acs.append(child.action)
            qs = np.asarray(qs)
            best_q_idx = np.argmax(qs)
            action = acs[best_q_idx]
        return action

    def select_action_random(self, state_node):
        """Uniform-random action (assumes a discrete env.ACTION_DIM space)."""
        action = np.random.randint(self.env.ACTION_DIM)
        return action

    def select_action_uct(self, state_node):
        """Select the action maximizing the UCB1 score.

        Untried actions score +inf, so every action is expanded once
        before any is revisited.
        """
        best_action = self.default_policy_fn.get_action(state_node.state)
        best_q = -np.inf
        for action in range(self.env.ACTION_DIM):
            sa_node, exist = state_node.find_child(action)
            if not exist:
                value = np.inf
            else:
                value = sa_node.value() + self.cp * np.sqrt(
                    np.log(state_node.visited_times) / sa_node.visited_times)
            if value > best_q:
                best_action = action
                best_q = value
        return best_action

    def aggregate_sa_node(self, state_node, action):
        """Return the (state, action) child for ``action``, creating it if absent."""
        new_sa_node, exist = state_node.find_child(action)
        if not exist:
            new_sa_node = StateActionNode(state_node.state, action,
                                          parent=state_node,
                                          depth=state_node.depth + 1)
            state_node.append_child(new_sa_node)
        return new_sa_node

    def expansion(self, leaf_state_node):
        """Expand a leaf: add one (s, a) child and its sampled outcome node."""
        action = self.default_policy_fn.get_action(leaf_state_node.state)
        new_sa_node = StateActionNode(leaf_state_node.state, action,
                                      parent=leaf_state_node,
                                      depth=leaf_state_node.depth + 1)
        leaf_state_node.append_child(new_sa_node)
        state_nxt, reward, done = self.model_fn.step(leaf_state_node.state, action)
        # Bug fix: depth was computed as ``depth + 1.`` (a float literal);
        # keep node depths integral.
        new_s_node = StateNode(state_nxt, parent=new_sa_node,
                               depth=new_sa_node.depth + 1,
                               reward=reward, done=done)
        new_sa_node.append_child(new_s_node)
        new_sa_node.reward += reward
        return new_s_node, done

    def select_outcome(self, sa_node):
        """Sample the model once and return the resulting state node."""
        state_nxt, reward, done = self.model_fn.step(sa_node.state, sa_node.action)
        decision_node, exist = sa_node.find_child(state_nxt)
        if not exist:
            decision_node = StateNode(state_nxt, parent=sa_node,
                                      depth=sa_node.depth + 1,
                                      reward=reward, done=done)
            sa_node.append_child(decision_node)
        # accumulate the immediate reward on the edge; reward_mean()
        # averages it over visits during back-propagation
        sa_node.reward += decision_node.reward
        return decision_node, done

    def back_propogation(self, current_s_node, cumulative_reward):
        """Propagate a rollout return from a leaf state node up to the root.

        (Method name kept for backward compatibility; "propagation" is the
        intended spelling.)
        """
        while True:
            current_s_node.visited_times += 1
            current_s_node.cumulative_reward += cumulative_reward
            if current_s_node.parent is None:
                break
            current_sa_node = current_s_node.parent
            current_sa_node.visited_times += 1
            # add the mean immediate reward collected on this edge
            cumulative_reward += current_sa_node.reward_mean()
            current_sa_node.cumulative_reward += cumulative_reward
            current_s_node = current_sa_node.parent

    def grow_tree(self, root_node):
        """One MCTS iteration: select, expand, evaluate, back up."""
        current_s_node = root_node
        # forward phase
        while True:
            # select an action, adding a (s, a) node to the tree if needed
            action = self.select_action(current_s_node)
            new_sa_node = self.aggregate_sa_node(current_s_node, action)
            # let the model generate the next state node
            current_s_node, done = self.select_outcome(new_sa_node)
            if current_s_node.visited_times == 0 or current_s_node.num_children() == 0:
                if not done:
                    current_s_node, done = self.expansion(current_s_node)
                break
        if not done:
            cumulative_reward = self.eval(current_s_node)
        else:
            cumulative_reward = 0.
        self.back_propogation(current_s_node, cumulative_reward)

    def eval(self, current_s_node, max_horizon=10):
        """Estimate the leaf value with a default-policy rollout.

        Accumulates rewards until the model signals ``done`` or
        ``max_horizon`` steps have been simulated.
        """
        state = current_s_node.state
        horizon = 0
        cumulative_reward = 0
        while True:
            horizon += 1
            action = self.default_policy_fn.get_action(state)
            state_nxt, reward, done = self.model_fn.step(state, action)
            cumulative_reward += reward
            # Bug fix: the rollout previously re-stepped from the same
            # state every iteration; advance to the sampled next state.
            state = state_nxt
            if done or horizon > max_horizon:
                break
        return cumulative_reward
class MctsSwp(Mcts):
    """Monte Carlo Tree Search with Single Progressive Widening.

    For infinite (or very large) action spaces over a finite state space:
    a node keeps sampling fresh actions from the default policy while its
    visit count permits widening, and otherwise selects by plain UCT.
    """

    def __init__(self, env, exploration_parameter, alpha, default_policy_fn, model_fn):
        self.cp = exploration_parameter
        self.alpha = alpha
        self.default_policy_fn = default_policy_fn
        self.env = env
        self.model_fn = model_fn
        self.select_action = self.select_action_swp

    def select_action_swp(self, state_node):
        """Single-progressive-widening action selection."""
        allow_widening = (
            state_node.visited_times ** self.alpha > state_node.num_children()
        )
        if not allow_widening:
            # Action set is already wide enough for this visit count.
            return self.select_action_uct(state_node)
        # Widen: draw a candidate action and ensure a matching (s, a)
        # child exists in the tree.
        action = self.default_policy_fn.get_action(state_node.state)
        sa_child, found = state_node.find_child(action)
        if not found:
            sa_child = StateActionNode(state_node.state, action,
                                       parent=state_node,
                                       depth=state_node.depth + 1)
            state_node.append_child(sa_child)
        return action
class MctsDwp(MctsSwp):
    """Monte Carlo Tree Search with Double Progressive Widening.

    Extends single progressive widening to the outcome side as well: a
    state-action node queries the model for new outcomes only while
    visits**beta allows it, and otherwise revisits an already-sampled
    outcome chosen proportionally to its visit count.
    """

    def __init__(self, env, exploration_parameter, alpha, beta, default_policy_fn, model_fn):
        self.cp = exploration_parameter
        self.alpha = alpha            # action-side widening exponent
        self.beta = beta              # outcome-side widening exponent
        self.default_policy_fn = default_policy_fn
        self.env = env
        self.model_fn = model_fn
        self.select_action = self.select_action_swp

    def choose_decision_node(self, sa_node):
        """Sample an existing child state node, weighted by visit counts."""
        logits = []
        decision_nodes = []
        for child in sa_node.children:
            logits.append(child.visited_times)
            decision_nodes.append(child)
        # Bug fix: the np.float alias was deprecated in NumPy 1.20 and
        # removed in 1.24; use the builtin float instead.
        logits = np.asarray(logits, dtype=float)
        total = np.sum(logits)
        if total > 0:
            probs = logits / total  # normalize visit counts into a distribution
        else:
            # No child has been visited yet: fall back to a uniform choice
            # rather than dividing by zero.
            probs = np.full(len(logits), 1.0 / len(logits))
        node_idx = np.random.choice(sa_node.num_children(), p=probs)
        decision_node = decision_nodes[node_idx]
        return decision_node

    def select_outcome(self, sa_node):
        """Double-progressive-widening outcome selection.

        While visits**beta >= number of sampled outcomes, query the model
        for a (possibly new) next state; otherwise revisit a previously
        sampled outcome.
        """
        if (sa_node.visited_times)**self.beta >= sa_node.num_children():
            state_nxt, reward, done = self.model_fn.step(sa_node.state, sa_node.action)
            decision_node, exist = sa_node.find_child(state_nxt)
            if not exist:
                decision_node = StateNode(state_nxt, parent=sa_node,
                                          depth=sa_node.depth + 1,
                                          reward=reward, done=done)
                sa_node.append_child(decision_node)
        else:
            decision_node = self.choose_decision_node(sa_node)
        done = decision_node.done
        # accumulate the immediate reward on the edge; reward_mean()
        # averages it over visits during back-propagation
        sa_node.reward += decision_node.reward
        return decision_node, done
| en | 0.534333 | Monte Carlo Tree Search Planning Method Normal version For finite action space and finite state space # grow the tree for N times # select action according to uct value # print("value", sa_node.value()) # print("cp", self.cp * np.sqrt(np.log(state_node.visited_times)/sa_node.visited_times)) # normal version # backward phase # forward phase # select action add a (s,a) node into tree # model generate next state add a (s) node into tree # print("cumulative_reward: ", cumulative_reward) Monte Carlo Tree Search Planning Method Single Progressive Widening For infinite action space finite state space # select action single progressive widening # print("state_node.visited_times)**alpha", (state_node.visited_times)**alpha) # print("state_node.num_children()", state_node.num_children()) Monte Carlo Tree Search Planning Method Double Progressive Widening For infinite action space finite state space # normalize # double progressive widening version | 2.628566 | 3 |
examples/examples_meadowsdata.py | kjamison/nsdcode | 3 | 6619150 | import os
import numpy as np
from scipy.spatial.distance import pdist
from scipy.stats import spearmanr
from mapdata.nsd_datalocation import nsd_datalocation
from meadows.meadows import (get_matask,
get_dragrate,
load_json_data,
meadows_subjects)
# establish where the behavioural data is in the data directories
base_path = os.path.join('/media', 'charesti-start', 'data', 'NSD')
data_dir = nsd_datalocation(base_path=base_path, dir0='behaviour')
# let's get the final RDM from the multiple arrangements task
rdm, stims = get_matask(data_dir, 'subj01')
# membership tests against a set are O(1); testing against the stims list
# would make each filtering comprehension below O(len(stims)) per item
# (assumes stimulus identifiers are hashable, e.g. strings)
stims_set = set(stims)
# let's get the dragrate data for 'arousal'
arousal, a_conf, a_stims = get_dragrate(data_dir, 'subj01', task='arousal')
# let's limit to the special 100
arousal_100 = np.asarray(
    [arousal[i] for i, stim in enumerate(a_stims) if stim in stims_set])
# let's get the dragrate data for 'valence'
valence, v_conf, v_stims = get_dragrate(data_dir, 'subj01', task='valence')
valence_100 = np.asarray(
    [valence[i] for i, stim in enumerate(v_stims) if stim in stims_set])
# let's make a quick affective model
# here we assume that affect is summarised by a 2-dimensional
# space (arousal and valence) and each stimulus is a point in this space.
# to measure the distance between pairs stimuli, for each pair, we simply
# measure the euclidean distance between the coordinates of the 2 items
# in the pair.
affect = np.c_[arousal_100, valence_100]
affect_rdm = pdist(affect, metric='euclidean')
# is the MA and affect correlated?
rel = spearmanr(rdm, affect_rdm)
# ok now let's see what else in the json
data = load_json_data(data_dir)
subj_key = meadows_subjects('subj01')[0]
for k in data[subj_key]['tasks']:
    print(k['task'])
# the submodules demoed above show you how to easily access
# the task data.
| import os
import numpy as np
from scipy.spatial.distance import pdist
from scipy.stats import spearmanr
from mapdata.nsd_datalocation import nsd_datalocation
from meadows.meadows import (get_matask,
get_dragrate,
load_json_data,
meadows_subjects)
# Locate the behavioural data inside the NSD data directories.
base_path = os.path.join('/media', 'charesti-start', 'data', 'NSD')
data_dir = nsd_datalocation(base_path=base_path, dir0='behaviour')
# Final RDM from the multiple-arrangements task.
rdm, stims = get_matask(data_dir, 'subj01')
# Drag-rate data for 'arousal', restricted to the special-100 stimuli.
arousal, a_conf, a_stims = get_dragrate(data_dir, 'subj01', task='arousal')
kept_arousal = []
for i, stim in enumerate(a_stims):
    if stim in stims:
        kept_arousal.append(arousal[i])
arousal_100 = np.asarray(kept_arousal)
# Drag-rate data for 'valence', restricted the same way.
valence, v_conf, v_stims = get_dragrate(data_dir, 'subj01', task='valence')
kept_valence = []
for i, stim in enumerate(v_stims):
    if stim in stims:
        kept_valence.append(valence[i])
valence_100 = np.asarray(kept_valence)
# Quick affective model: each stimulus is a point in a 2-dimensional
# (arousal, valence) space, and the dissimilarity of a pair of stimuli is
# simply the euclidean distance between their coordinates.
affect = np.column_stack((arousal_100, valence_100))
affect_rdm = pdist(affect, metric='euclidean')
# How strongly do the MA dissimilarities track the affective model?
rel = spearmanr(rdm, affect_rdm)
# Inspect what else is available in the raw json dump.
data = load_json_data(data_dir)
subj_key = meadows_subjects('subj01')[0]
for k in data[subj_key]['tasks']:
    print(k['task'])
# The submodules demoed above show you how to easily access
# the task data.
| en | 0.876644 | # establish where the behavioural data is in the data directories # let's get the final RDM from the multiple arrangements task # let's get the dragrate data for 'arousal' # let's limit to the special 100 # let's get the dragrate data for 'valence' # let's make a quick affective model # here we assume that affect is summarised by a 2-dimensional # space (arousal and valence) and each stimulus is a point in this space. # to measure the distance between pairs stimuli, for each pair, we simply # measure the euclidean distance between the coordinates of the 2 items # in the pair. # is the MA and affect correlated? # ok now let's see what else in the json # the submodules demoed above show you how to easily access # the task data. | 2.321189 | 2 |