hexsha
stringlengths 40
40
| size
int64 5
1.05M
| ext
stringclasses 98
values | lang
stringclasses 21
values | max_stars_repo_path
stringlengths 3
945
| max_stars_repo_name
stringlengths 4
118
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
368k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
945
| max_issues_repo_name
stringlengths 4
118
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
134k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
945
| max_forks_repo_name
stringlengths 4
135
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
1.05M
| avg_line_length
float64 1
1.03M
| max_line_length
int64 2
1.03M
| alphanum_fraction
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6b76cb67825f4d25d4171836358bebe6a79c9da1
| 615
|
js
|
JavaScript
|
dist/scripts/Services/Cookies.js
|
leogoesger/bloc-frontend-project-starter
|
dd6e0033c69def20fc513e9a5274dd1be42d40e1
|
[
"Apache-2.0"
] | null | null | null |
dist/scripts/Services/Cookies.js
|
leogoesger/bloc-frontend-project-starter
|
dd6e0033c69def20fc513e9a5274dd1be42d40e1
|
[
"Apache-2.0"
] | null | null | null |
dist/scripts/Services/Cookies.js
|
leogoesger/bloc-frontend-project-starter
|
dd6e0033c69def20fc513e9a5274dd1be42d40e1
|
[
"Apache-2.0"
] | null | null | null |
(function() {
    // Run block: on app start, open the username-prompt modal when no
    // 'blocChatCurrentUser' cookie is present.
    function Cookies($rootScope, $cookies, $uibModal) {
        // NOTE(review): removing the cookie on every start forces the modal
        // to open every time; this looks like a debugging leftover — confirm
        // before shipping.
        $cookies.remove('blocChatCurrentUser');
        var currentUser = $cookies.get('blocChatCurrentUser');
        if (!currentUser || currentUser === '') {
            // Modal cannot be dismissed by clicking the backdrop or pressing
            // Esc: the user must pick a name.
            $rootScope.modal = $uibModal.open({
                templateUrl: '/templates/modal.html',
                controller: 'ModalCtrl',
                controllerAs: 'modal',
                size: 'sm',
                backdrop: 'static',
                // Fixed: ui-bootstrap expects a boolean here; the string
                // 'false' is truthy, which left Esc-to-close enabled.
                keyboard: false
            });
        }
    }
    angular
        .module('blocChat')
        .run(['$rootScope', '$cookies', '$uibModal', Cookies]);
})();
| 25.625
| 59
| 0.577236
|
7ee5754e5db7ba8ab34642b3f60cc2cc492bc10c
| 102
|
rb
|
Ruby
|
benchmark/bm_hash_shift_string.rb
|
eiko/opal
|
b6bf7992a6cbffc0fb80e4afdc31d470001c734d
|
[
"MIT"
] | 2,849
|
2015-01-01T04:53:23.000Z
|
2022-03-30T12:00:20.000Z
|
benchmark/bm_hash_shift_string.rb
|
eiko/opal
|
b6bf7992a6cbffc0fb80e4afdc31d470001c734d
|
[
"MIT"
] | 1,456
|
2015-01-01T22:40:04.000Z
|
2022-03-31T08:04:20.000Z
|
benchmark/bm_hash_shift_string.rb
|
eiko/opal
|
b6bf7992a6cbffc0fb80e4afdc31d470001c734d
|
[
"MIT"
] | 312
|
2015-01-06T17:50:48.000Z
|
2022-03-25T01:26:49.000Z
|
# Benchmark: build a 10k-entry Hash with string keys, then repeatedly
# Hash#shift the oldest entry out and re-insert it, one million times.
h = {}
(0...10_000).each { |i| h[i.to_s] = nil }
1_000_000.times do
  key, value = h.shift
  h[key] = value
end
| 9.272727
| 19
| 0.558824
|
5e2fea7a42ba9c29921cf867e6d71765d0d406ff
| 1,980
|
rb
|
Ruby
|
lib/rails_admin_map_field/rails_admin/config/fields/types/map.rb
|
osorubeki-fujita/rails_admin_map_field_clone
|
4fe5f3f5aa41708710850853bbcaef46e3e612be
|
[
"MIT",
"Unlicense"
] | null | null | null |
lib/rails_admin_map_field/rails_admin/config/fields/types/map.rb
|
osorubeki-fujita/rails_admin_map_field_clone
|
4fe5f3f5aa41708710850853bbcaef46e3e612be
|
[
"MIT",
"Unlicense"
] | null | null | null |
lib/rails_admin_map_field/rails_admin/config/fields/types/map.rb
|
osorubeki-fujita/rails_admin_map_field_clone
|
4fe5f3f5aa41708710850853bbcaef46e3e612be
|
[
"MIT",
"Unlicense"
] | null | null | null |
module RailsAdmin::Config::Fields::Types
  # rails_admin field type that pairs a latitude attribute (the field itself,
  # @name) with a longitude attribute and renders both as a map widget.
  class Map < RailsAdmin::Config::Fields::Base
    RailsAdmin::Config::Fields::Types::register(:map, self)
    # Both the latitude (@name) and longitude attributes must be assignable
    # through the admin form.
    def allowed_methods
      [@name, longitude_field]
    end
    # The name of the corresponding longitude field to match the latitude field
    # in this object.
    register_instance_option(:longitude_field) do
      :longitude
    end
    # Partial used to render the form widget.
    register_instance_option(:partial) do
      :form_map
    end
    # Language of the map
    register_instance_option(:language) do
      :en
    end
    # Google Maps API Key - optional
    register_instance_option(:google_api_key) do
      nil
    end
    # Latitude value to display in the map if the latitude attribute is nil
    # (Otherwise the location defaults to (0,0) which is in the Gulf of Guinea)
    register_instance_option(:default_latitude) do
      51.5 # Latitude of London, United Kingdom
    end
    # Longitude value to display if the longitude attribute is nil
    register_instance_option(:default_longitude) do
      -0.126 # Longitude of London, United Kingdom
    end
    # Default zoom level of the map
    register_instance_option(:default_zoom_level) do
      8
    end
    # DOM id combining object name, latitude field and longitude field; used to
    # identify the map widget itself in the form.
    def dom_name
      @dom_name ||= "#{bindings[:form].object_name}_#{@name}_#{longitude_field}"
    end
    # DOM id of the latitude input.
    def latitude_dom_name
      @lat_dom_name ||= "#{bindings[:form].object_name}_#{@name}"
    end
    # DOM id of the longitude input.
    def longitude_dom_name
      @lon_dom_name ||= "#{bindings[:form].object_name}_#{longitude_field}"
    end
    # Current latitude value read from the record being edited.
    def latitude_of_object
      bindings[:object].send(name)
    end
    # Current longitude value read from the record being edited.
    def longitude_of_object
      bindings[:object].send(longitude_field)
    end
    # Defines latitude_on_init / longitude_on_init: the stored coordinate when
    # present, otherwise the configured default.
    [ :latitude, :longitude ].each do | attribute |
      eval <<-DEF
        def #{ attribute }_on_init
          #{ attribute }_of_object || default_#{ attribute }
        end
      DEF
    end
    # True when both coordinates are present on the record.
    def has_coordinates?
      [ latitude_of_object, longitude_of_object ].all?(&:present?)
    end
  end
end
| 25.384615
| 80
| 0.668687
|
93e23125007edc5bd70e4b2830e76a4ee8526e57
| 2,106
|
cs
|
C#
|
Xamarin.Plugin.Firebase.RemoteConfig.Abstractions/IFirebaseRemoteConfig.cs
|
Px7-941/Xamarin.Plugin.Firebase.RemoteConf
|
6b6c522a5278b26c38fce7a623b0bb1f20e8655b
|
[
"MIT"
] | 9
|
2019-02-21T07:26:40.000Z
|
2021-07-20T22:41:51.000Z
|
Xamarin.Plugin.Firebase.RemoteConfig.Abstractions/IFirebaseRemoteConfig.cs
|
MRomeror/Xamarin.Plugin.Firebase.RemoteConfig
|
d6566fe6f15c6c2cba416a4978e6600fe0f23f4d
|
[
"MIT"
] | 5
|
2018-10-25T11:16:32.000Z
|
2020-10-07T14:25:31.000Z
|
Xamarin.Plugin.Firebase.RemoteConfig.Abstractions/IFirebaseRemoteConfig.cs
|
MRomeror/Xamarin.Plugin.Firebase.RemoteConfig
|
d6566fe6f15c6c2cba416a4978e6600fe0f23f4d
|
[
"MIT"
] | 5
|
2019-09-17T10:44:09.000Z
|
2021-02-01T08:25:20.000Z
|
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
namespace Xamarin.Plugin.FirebaseRemoteConfig {
    /// <summary>
    /// Cross-platform abstraction over Firebase Remote Config: initialization,
    /// fetch/activate lifecycle, and typed value accessors.
    /// </summary>
    public interface IFirebaseRemoteConfig {
        /// <summary>
        /// Initializes the service.
        /// </summary>
        /// <param name="defaultConfigResourceName">If set, load defaults from this resource</param>
        /// <param name="developerModeEnabled">If set to <c>true</c> developer mode is enabled.</param>
        void Init(string defaultConfigResourceName = null, bool developerModeEnabled = false);
        /// <summary>
        /// Initializes the service without default config.
        /// </summary>
        /// <param name="developerModeEnabled">If set to <c>true</c> developer mode is enabled.</param>
        void Init(bool developerModeEnabled = false);
        /// <summary>
        /// Fetches the remote config.
        /// </summary>
        /// <param name="cacheExpiration">Cache expiration in seconds.</param>
        /// <exception cref="FirebaseRemoteConfigFetchFailedException">when fetch fails.</exception>
        Task FetchAsync(long cacheExpiration);
        /// <summary>
        /// Activates the last fetched config.
        /// </summary>
        void ActivateFetched();
        /// <summary>
        /// Gets the value with specified key as string.
        /// </summary>
        string GetString(string key);
        /// <summary>
        /// Gets the value with specified key as byte array.
        /// </summary>
        byte[] GetBytes(string key);
        /// <summary>
        /// Gets the value with specified key as boolean.
        /// </summary>
        bool GetBool(string key);
        /// <summary>
        /// Gets the value with specified key as long.
        /// </summary>
        long GetLong(string key);
        /// <summary>
        /// Gets the value with specified key as double.
        /// </summary>
        double GetDouble(string key);
        /// <summary>
        /// Gets all keys by prefix.
        /// </summary>
        ICollection<string> GetKeysByPrefix(string prefix);
    }
}
| 32.90625
| 103
| 0.593067
|
b44f4023bd054d1f7ecb7618375d0d0bd4725b87
| 252
|
rb
|
Ruby
|
features/support/dsl/assertions.rb
|
port80labs/git-pivotal
|
df38a0018c2fe2d7de801dd7dd155e3a40305a62
|
[
"MIT"
] | null | null | null |
features/support/dsl/assertions.rb
|
port80labs/git-pivotal
|
df38a0018c2fe2d7de801dd7dd155e3a40305a62
|
[
"MIT"
] | null | null | null |
features/support/dsl/assertions.rb
|
port80labs/git-pivotal
|
df38a0018c2fe2d7de801dd7dd155e3a40305a62
|
[
"MIT"
] | null | null | null |
module GitPivotal
  module FeatureHelpers
    # Cucumber assertion helpers mixed into the test World below.
    module Assertions
      # Asserts that the Pivotal Tracker story with the given id is in the
      # "started" state (old-style RSpec `should` syntax).
      def assert_card_is_started(id)
        pivotal_project.stories.find(id).current_state.should eq("started")
      end
    end
  end
end
# Make the helpers available inside Cucumber step definitions.
World(GitPivotal::FeatureHelpers::Assertions)
| 21
| 75
| 0.738095
|
ac77ad2c59353f14fd5310451a6fa8e2a4701f1c
| 327
|
kt
|
Kotlin
|
app/src/main/java/com/madonnaapps/wodnotify/common/extensions/ActivityExtensions.kt
|
tjmadonna/wod-notify
|
4163c1176c02415e8d8f38c6ee15ae5f812df97a
|
[
"Apache-2.0"
] | null | null | null |
app/src/main/java/com/madonnaapps/wodnotify/common/extensions/ActivityExtensions.kt
|
tjmadonna/wod-notify
|
4163c1176c02415e8d8f38c6ee15ae5f812df97a
|
[
"Apache-2.0"
] | null | null | null |
app/src/main/java/com/madonnaapps/wodnotify/common/extensions/ActivityExtensions.kt
|
tjmadonna/wod-notify
|
4163c1176c02415e8d8f38c6ee15ae5f812df97a
|
[
"Apache-2.0"
] | null | null | null |
package com.madonnaapps.wodnotify.common.extensions
import android.app.Activity
import com.madonnaapps.wodnotify.WodApplication
import com.madonnaapps.wodnotify.di.AppComponent
/**
 * The application component used by Dagger.
 *
 * Convenience accessor that casts the activity's application to
 * [WodApplication] and returns its [AppComponent]; assumes the hosting
 * application is always a WodApplication (the cast throws otherwise).
 */
val Activity.appComponent: AppComponent
    get() = (application as WodApplication).appComponent
| 29.727273
| 56
| 0.819572
|
c42a95ae09e4e4ec894bc0e23fbad602c769659c
| 3,660
|
cc
|
C++
|
llvm-gcc-4.2-2.9/libstdc++-v3/testsuite/27_io/basic_istream/getline/wchar_t/1.cc
|
vidkidz/crossbridge
|
ba0bf94aee0ce6cf7eb5be882382e52bc57ba396
|
[
"MIT"
] | 1
|
2016-04-09T02:58:13.000Z
|
2016-04-09T02:58:13.000Z
|
llvm-gcc-4.2-2.9/libstdc++-v3/testsuite/27_io/basic_istream/getline/wchar_t/1.cc
|
vidkidz/crossbridge
|
ba0bf94aee0ce6cf7eb5be882382e52bc57ba396
|
[
"MIT"
] | null | null | null |
llvm-gcc-4.2-2.9/libstdc++-v3/testsuite/27_io/basic_istream/getline/wchar_t/1.cc
|
vidkidz/crossbridge
|
ba0bf94aee0ce6cf7eb5be882382e52bc57ba396
|
[
"MIT"
] | null | null | null |
// Copyright (C) 2004 Free Software Foundation
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 2, or (at your option)
// any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING. If not, write to the Free
// Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
// USA.
// 27.6.1.3 unformatted input functions
#include <istream>
#include <sstream>
#include <testsuite_hooks.h>
// Exercises wide-character basic_istream::getline(wchar_t*, streamsize[, delim]):
// first against an istream built on a NULL streambuf (must set failbit), then
// against a wstringbuf, checking rdstate() and gcount() after each extraction.
void
test02()
{
  typedef std::char_traits<wchar_t> traits_type;
  bool test __attribute__((unused)) = true;
  const wchar_t str_lit01[] = L"\t\t\t sun*ra \n"
  L" "
  L"and his myth science arkestra present\n"
  L" "
  L"angles and demons @ play\n"
  L" "
  L"the nubians of plutonia";
  std::wstring str01(str_lit01);
  std::wstring strtmp;
  std::wstringbuf sbuf_04(str01, std::ios_base::in);
  // Stream with no streambuf: every extraction must fail.
  std::wistream is_00(NULL);
  std::wistream is_04(&sbuf_04);
  std::ios_base::iostate state1, state2, statefail, stateeof;
  statefail = std::ios_base::failbit;
  stateeof = std::ios_base::eofbit;
  wchar_t carray1[400] = L"";
  // istream& getline(wchar_t* s, streamsize n, wchar_t delim)
  // istream& getline(wchar_t* s, streamsize n)
  state1 = is_00.rdstate();
  is_00.getline(carray1, 20, L'*');
  state2 = is_00.rdstate();
  // make sure failbit was set, since we couldn't extract
  // from the NULL streambuf...
  VERIFY( state1 != state2 );
  VERIFY( static_cast<bool>(state2 & statefail) );
  // Nothing has been extracted from is_04 yet.
  VERIFY( is_04.gcount() == 0 );
  state1 = is_04.rdstate();
  is_04.getline(carray1, 1, L'\t'); // extracts, throws away
  state2 = is_04.rdstate();
  VERIFY( is_04.gcount() == 1 );
  VERIFY( state1 == state2 );
  VERIFY( state1 == 0 );
  VERIFY( !traits_type::compare(L"", carray1, 1) );
  // Delimiter '*' reached before the buffer limit: state stays good.
  state1 = is_04.rdstate();
  is_04.getline(carray1, 20, L'*');
  state2 = is_04.rdstate();
  VERIFY( is_04.gcount() == 10 );
  VERIFY( state1 == state2 );
  VERIFY( state1 == 0 );
  VERIFY( !traits_type::compare(L"\t\t sun", carray1, 10) );
  // Default '\n' delimiter form.
  state1 = is_04.rdstate();
  is_04.getline(carray1, 20);
  state2 = is_04.rdstate();
  VERIFY( is_04.gcount() == 4 );
  VERIFY( state1 == state2 );
  VERIFY( state1 == 0 );
  VERIFY( !traits_type::compare(L"ra ", carray1, 4) );
  // Buffer fills (n-1 == 64 chars) before the delimiter: failbit expected.
  state1 = is_04.rdstate();
  is_04.getline(carray1, 65);
  state2 = is_04.rdstate();
  VERIFY( is_04.gcount() == 64 );
  VERIFY( state1 != state2 );
  VERIFY( state2 == statefail );
  VERIFY( !traits_type::compare(
  L" and his myth science arkestra presen",
  carray1, 65) );
  // Delimiter never found: reads to end of input, eofbit only.
  is_04.clear();
  state1 = is_04.rdstate();
  is_04.getline(carray1, 120, L'|');
  state2 = is_04.rdstate();
  VERIFY( is_04.gcount() == 106 );
  VERIFY( state1 != state2 );
  VERIFY( state2 == stateeof );
  // getline at eof with nothing left: both eofbit and failbit set.
  is_04.clear();
  state1 = is_04.rdstate();
  is_04.getline(carray1, 100, L'|');
  state2 = is_04.rdstate();
  VERIFY( is_04.gcount() == 0 );
  VERIFY( state1 != state2 );
  VERIFY( static_cast<bool>(state2 & stateeof) );
  VERIFY( static_cast<bool>(state2 & statefail) );
}
// Entry point: run the single test case.
int
main()
{
  test02();
  return 0;
}
| 30.756303
| 79
| 0.639891
|
c3c09b00f595db9dd5af36b6d65e1bf150dd63c4
| 452
|
cs
|
C#
|
TeoVincent.EventAggregator.Service.UnitTests/Mocks/PublisherCreator_Mock.cs
|
TeoVincent/Event-Aggregator-Through-net.pipe
|
fe648ab0ac175ff17a01b2d16b58d776341f1d43
|
[
"MIT"
] | 1
|
2017-03-17T04:45:48.000Z
|
2017-03-17T04:45:48.000Z
|
TeoVincent.EventAggregator.Service.UnitTests/Mocks/PublisherCreator_Mock.cs
|
TeoVincent/Event-Aggregator-Through-net.pipe
|
fe648ab0ac175ff17a01b2d16b58d776341f1d43
|
[
"MIT"
] | null | null | null |
TeoVincent.EventAggregator.Service.UnitTests/Mocks/PublisherCreator_Mock.cs
|
TeoVincent/Event-Aggregator-Through-net.pipe
|
fe648ab0ac175ff17a01b2d16b58d776341f1d43
|
[
"MIT"
] | null | null | null |
using TeoVincent.EA.Common.Service;
namespace TeoVincent.EA.Service.UnitTests.Mocks
{
    /// <summary>
    /// Test double for <see cref="IPublisherCreator"/> that always hands back
    /// the single publisher instance supplied at construction time.
    /// </summary>
    public class PublisherCreator_Mock : IPublisherCreator
    {
        private readonly IEventPublisher publisher;

        public PublisherCreator_Mock(IEventPublisher eventPublisher)
        {
            publisher = eventPublisher;
        }

        /// <summary>
        /// Returns the injected publisher (never creates a new one).
        /// </summary>
        public IEventPublisher Create()
        {
            return publisher;
        }
    }
}
| 23.789474
| 68
| 0.663717
|
affaf861e71f0cce4bc8008cc42860156d373647
| 696
|
py
|
Python
|
constants.py
|
Ed-Zh/Basketball-Analytics
|
e2b3658d3f45d20ab59121e2b44201e2f7d89a6e
|
[
"MIT"
] | 1
|
2021-08-05T10:37:28.000Z
|
2021-08-05T10:37:28.000Z
|
constants.py
|
Ed-Zh/Basketball-Analytics
|
e2b3658d3f45d20ab59121e2b44201e2f7d89a6e
|
[
"MIT"
] | null | null | null |
constants.py
|
Ed-Zh/Basketball-Analytics
|
e2b3658d3f45d20ab59121e2b44201e2f7d89a6e
|
[
"MIT"
] | null | null | null |
# Limbs: (landmark_a, landmark_b) index pairs for right-side body segments.
# NOTE(review): indices look like MediaPipe Pose's 33-landmark numbering
# (24 = right hip, 26 = right knee, 28 = right ankle, ...) — confirm against
# the pose-estimation library used by callers.
LEG_UPPER_RIGHT = (24,26)
LEG_LOWER_RIGHT = (26,28)
UPPER_BODY_RIGHT = (12,24)
ARM_UPPER_RIGHT = (12,14)
ARM_LOWER_RIGHT = (14,16)
FOOT_RIGHT = (28,32)
# Fixed: LEG_LOWER_RIGHT was missing from the "ALL" list even though it is
# defined above and every other right-side limb was included.
LIMBS_ALL = [LEG_UPPER_RIGHT,LEG_LOWER_RIGHT,UPPER_BODY_RIGHT,ARM_UPPER_RIGHT,ARM_LOWER_RIGHT,FOOT_RIGHT]

# Joints: (a, b, c) landmark triples; the joint angle is measured at the
# middle landmark b.
ANKLE_RIGHT = (26,28,32)
ELBOW_RIGHT = (12,14,16)
SHOULDER_RIGHT = (14,12,24)
HIP_RIGHT = (12,24,26)
KNEE_RIGHT = (24,26,28)
JOINTS_ALL = [ANKLE_RIGHT,ELBOW_RIGHT,SHOULDER_RIGHT,HIP_RIGHT,KNEE_RIGHT]

# Short display label for each joint triple (e.g. for on-frame annotation).
joint_to_text = {(26,28,32):'Ankle',(12,14,16):'Elbow',(14,12,24):'Shdlr', (12,24,26):'Hip', (24,26,28): 'Knee'}
# Primary muscle associated with each joint triple.
joint_to_muscle = {(26,28,32):'Calf',(12,14,16):'Tricep',(14,12,24):'Shoulder', (12,24,26):'Glute', (24,26,28): 'Quad'}
| 31.636364
| 119
| 0.701149
|
93d7762e5e3966ff3b2f02eb47b007ca233c53f6
| 117
|
cs
|
C#
|
CP/Games/ClueBoardGameCP/Data/EnumNameList.cs
|
musictopia2/GamingPackXV3
|
dbc6ee2127f829b600fb084fc30b26f9020b8fde
|
[
"MIT"
] | null | null | null |
CP/Games/ClueBoardGameCP/Data/EnumNameList.cs
|
musictopia2/GamingPackXV3
|
dbc6ee2127f829b600fb084fc30b26f9020b8fde
|
[
"MIT"
] | null | null | null |
CP/Games/ClueBoardGameCP/Data/EnumNameList.cs
|
musictopia2/GamingPackXV3
|
dbc6ee2127f829b600fb084fc30b26f9020b8fde
|
[
"MIT"
] | null | null | null |
namespace ClueBoardGameCP.Data;
/// <summary>
/// The six Clue suspect names; <c>None</c> is the sentinel "no selection" value.
/// </summary>
public enum EnumNameList
{
    None, Peacock, Green, Plum, Scarlet, White, Mustard
}
| 23.4
| 55
| 0.760684
|
0d8b50e85436989934e005749e0109081414e2f0
| 3,098
|
h
|
C
|
iOS/AVOSChatDemo/AVOSChatDemo/third_party/MessagesTableViewController/JSBubbleMessageCell.h
|
aadebuger/leancloud-demo
|
0a4915366f32dd9d1c7498f7a1c82e4b903a781e
|
[
"MIT"
] | 1
|
2020-06-30T01:04:04.000Z
|
2020-06-30T01:04:04.000Z
|
iOS/AVOSChatDemo/AVOSChatDemo/third_party/MessagesTableViewController/JSBubbleMessageCell.h
|
EmoryLee/avoscloud-demo
|
52fd3df86d7785f6d7fedca18a0e1020edd2f62a
|
[
"MIT"
] | null | null | null |
iOS/AVOSChatDemo/AVOSChatDemo/third_party/MessagesTableViewController/JSBubbleMessageCell.h
|
EmoryLee/avoscloud-demo
|
52fd3df86d7785f6d7fedca18a0e1020edd2f62a
|
[
"MIT"
] | null | null | null |
//
// JSBubbleMessageCell.h
//
// Created by Jesse Squires on 2/12/13.
// Copyright (c) 2013 Hexed Bits. All rights reserved.
//
// http://www.hexedbits.com
//
//
// Largely based on work by Sam Soffes
// https://github.com/soffes
//
// SSMessagesViewController
// https://github.com/soffes/ssmessagesviewcontroller
//
//
// The MIT License
// Copyright (c) 2013 Jesse Squires
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
// associated documentation files (the "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE
// OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
#import <UIKit/UIKit.h>
#import "JSBubbleView.h"
// Visual style of the avatar shown beside a message bubble.
typedef enum {
    JSAvatarStyleCircle = 0,
    JSAvatarStyleSquare,
    JSAvatarStyleNone
} JSAvatarStyle;
// Table-view cell that renders a single chat message bubble, optionally with
// a timestamp, sender name and avatar image.
@interface JSBubbleMessageCell : UITableViewCell
@property (nonatomic, assign) BOOL isSelected;
// NOTE(review): redeclares UITableViewCell's imageView as readonly — confirm
// the implementation intends to shadow the superclass property.
@property (nonatomic, readonly, strong) UIImageView *imageView;
@property (nonatomic, readonly, strong) UIImage *renderedMark;
#pragma mark - Initialization
// Designated initializer; the flags select which optional decorations
// (timestamp, name) the cell lays out.
- (id)initWithBubbleType:(JSBubbleMessageType)type
bubbleStyle:(JSBubbleMessageStyle)bubbleStyle
avatarStyle:(JSAvatarStyle)avatarStyle
mediaType:(JSBubbleMediaType)mediaType
hasTimestamp:(BOOL)hasTimestamp
hasName:(BOOL)hasName
reuseIdentifier:(NSString *)reuseIdentifier;
#pragma mark - Message cell
// Content setters used by the table view data source when configuring a cell.
- (void)setName:(NSString *)name;
- (void)setMessage:(NSString *)msg;
- (void)setMedia:(id)data;
- (void)setTimestamp:(NSDate *)date;
- (void)setAvatarImage:(UIImage *)image;
- (void)setAvatarImageTarget:(id)target action:(SEL)action;
//+ (CGFloat)neededHeightForText:(NSString *)bubbleViewText
// timestamp:(BOOL)hasTimestamp
// avatar:(BOOL)hasAvatar;
//
//+ (CGFloat)neededHeightForImage:(UIImage *)bubbleViewImage
// timestamp:(BOOL)hasTimestamp
// avatar:(BOOL)hasAvatar;
// Row-height helpers for the table view delegate.
+ (CGFloat)neededHeightForText:(NSString *)bubbleViewText timestamp:(BOOL)hasTimestamp name:(BOOL)hasName avatar:(BOOL)hasAvatar;
+ (CGFloat)neededHeightForImage:(UIImage *)bubbleViewImage timestamp:(BOOL)hasTimestamp name:(BOOL)hasName avatar:(BOOL)hasAvatar;
@end
| 38.246914
| 130
| 0.723693
|
54a303d904f4a575cd46d4b395f204d185061800
| 1,539
|
css
|
CSS
|
src/components/Common/Checkbox.css
|
rzaba0/soldat-launcher
|
238e197a18423b7331ab9b8882def16cd338627d
|
[
"MIT"
] | 8
|
2020-10-05T06:27:48.000Z
|
2021-09-14T01:45:00.000Z
|
src/components/Common/Checkbox.css
|
rzaba0/soldat-launcher
|
238e197a18423b7331ab9b8882def16cd338627d
|
[
"MIT"
] | 6
|
2020-10-05T13:27:54.000Z
|
2021-01-26T23:27:04.000Z
|
src/components/Common/Checkbox.css
|
rzaba0/soldat-launcher
|
238e197a18423b7331ab9b8882def16cd338627d
|
[
"MIT"
] | 1
|
2021-11-26T16:14:56.000Z
|
2021-11-26T16:14:56.000Z
|
/* Clickable wrapper holding the hidden native input, the custom box and labels. */
.checkbox-container {
    cursor: pointer;
    display: inline-flex;
    align-self: flex-start;
    align-items: center;
}
/* Hide the browser's default checkbox (kept in the DOM for accessibility
 * and for the :checked selector below). */
.checkbox-container input {
    opacity: 0;
    cursor: pointer;
    position: absolute;
    left: -9999px;
    z-index: -1;
}
/* Create a custom checkbox */
.checkbox-container .checkbox {
    display: inline-flex;
    align-items: center;
    justify-content: center;
    height: 1.2em;
    width: 1.2em;
}
/* Check mark stays hidden until the sibling input becomes :checked. */
.checkbox-container .checkbox .check-icon {
    visibility: hidden;
}
.checkbox-container input:checked ~ .checkbox .check-icon {
    /* Inheriting visibility should handle scenario with
     * checkbox inside tooltips. Without inheriting, the
     * checkbox icon would still be shown, despite the tooltip
     * being hidden. */
    visibility: inherit;
}
/* Label spacing on either side of the box. */
.checkbox-container .right-label {
    margin-left: 0.7em;
    flex: 1;
}
.checkbox-container .left-label {
    margin-right: 0.7em;
    flex: 1;
}
/* Color themes */
.checkbox-container.dark .checkbox {
    background-color: #282828;
    border: 1px solid #828282;
}
.checkbox-container.dark:hover .checkbox {
    background-color: #424954;
}
.checkbox-container.dark .checkbox .check-icon {
    color: #ffffff;
}
.checkbox-container.light .checkbox {
    background-color: #ffffff;
    border: 1px solid #cccccc;
}
.checkbox-container.light:hover .checkbox {
    background-color: #f2f2f2;
    border: 1px solid #b3b3b3;
}
| 21.676056
| 63
| 0.640026
|
ef54f9c8fa238a7a93141cc0c667141adf461833
| 875
|
lua
|
Lua
|
config/layouts/tile/init.lua
|
nephitejnf/japokwm
|
7cf137a1c21c80f1ea4dc5dc2c74645d998810f0
|
[
"BSD-2-Clause"
] | null | null | null |
config/layouts/tile/init.lua
|
nephitejnf/japokwm
|
7cf137a1c21c80f1ea4dc5dc2c74645d998810f0
|
[
"BSD-2-Clause"
] | null | null | null |
config/layouts/tile/init.lua
|
nephitejnf/japokwm
|
7cf137a1c21c80f1ea4dc5dc2c74645d998810f0
|
[
"BSD-2-Clause"
] | null | null | null |
-- Tiling layouts indexed by client count: entry N is used when N windows are
-- open. Each inner row is {x, y, width, height} in screen fractions; the
-- first window is the master filling the left half, the rest stack vertically
-- on the right half.
local layout_data = {
    {
        {0, 0, 1, 1},
    },
    {
        {0.0, 0.0, 0.5, 1.0},
        {0.5, 0.0, 0.5, 1.0},
    },
    {
        {0.0, 0.0, 0.5, 1.0},
        {0.5, 0.0, 0.5, 0.5},
        {0.5, 0.5, 0.5, 0.5},
    },
    {
        -- NOTE(review): 0.333/0.666 leave a ~0.1% gap at the bottom edge;
        -- presumably the compositor absorbs the rounding — confirm.
        {0.0, 0.000, 0.5, 1.000},
        {0.5, 0.000, 0.5, 0.333},
        {0.5, 0.333, 0.5, 0.333},
        {0.5, 0.666, 0.5, 0.333},
    },
    {
        {0.0, 0.00, 0.5, 1.00},
        {0.5, 0.00, 0.5, 0.25},
        {0.5, 0.25, 0.5, 0.25},
        {0.5, 0.50, 0.5, 0.25},
        {0.5, 0.75, 0.5, 0.25},
    },
}
-- Register the layout table with the window manager (layout/opt/Direction are
-- globals provided by japokwm's config environment).
layout:set(layout_data)
opt.hidden_edges = Direction.all
opt.smart_hidden_edges = false
opt.resize_direction = Direction.right
-- Clamp resize limits (screen fractions) for any window and for the master.
opt:set_layout_constraints({min_width = 0.1, max_width = 1, min_height = 0.1, max_height = 1})
opt:set_master_constraints({min_width = 0.1, max_width = 1, min_height = 0.1, max_height = 1})
| 25
| 94
| 0.459429
|
800f2f07502ae26b6637b41bda5454895e92d6a8
| 622
|
sql
|
SQL
|
clients/mysql-schema/generated/Model/DefaultCrumbIssuer.sql
|
cliffano/jenkins-api-clients-generator
|
522d02b3a130a29471df5ec1d3d22c822b3d0813
|
[
"MIT"
] | null | null | null |
clients/mysql-schema/generated/Model/DefaultCrumbIssuer.sql
|
cliffano/jenkins-api-clients-generator
|
522d02b3a130a29471df5ec1d3d22c822b3d0813
|
[
"MIT"
] | null | null | null |
clients/mysql-schema/generated/Model/DefaultCrumbIssuer.sql
|
cliffano/jenkins-api-clients-generator
|
522d02b3a130a29471df5ec1d3d22c822b3d0813
|
[
"MIT"
] | null | null | null |
--
-- Swaggy Jenkins.
-- Prepared SQL queries for 'DefaultCrumbIssuer' definition.
--
--
-- SELECT template for table `DefaultCrumbIssuer`
-- (`WHERE 1` is an always-true placeholder; append real conditions as needed)
--
SELECT `_class`, `crumb`, `crumbRequestField` FROM `DefaultCrumbIssuer` WHERE 1;
--
-- INSERT template for table `DefaultCrumbIssuer`
-- (three positional parameters, one per column)
--
INSERT INTO `DefaultCrumbIssuer`(`_class`, `crumb`, `crumbRequestField`) VALUES (?, ?, ?);
--
-- UPDATE template for table `DefaultCrumbIssuer`
-- (`WHERE 1` updates every row; replace with a key condition before use)
--
UPDATE `DefaultCrumbIssuer` SET `_class` = ?, `crumb` = ?, `crumbRequestField` = ? WHERE 1;
--
-- DELETE template for table `DefaultCrumbIssuer`
-- (`WHERE 0` matches nothing by design; replace with a real condition before use)
--
DELETE FROM `DefaultCrumbIssuer` WHERE 0;
| 23.037037
| 91
| 0.709003
|
1a329d1f7a1e9c48bf57c34961a27d0afa20c083
| 1,249
|
py
|
Python
|
plot-psd.py
|
patrickmelix/RASPA2-tools
|
127d6f9789e350e660e808c6bd9daf9f4addf194
|
[
"MIT"
] | null | null | null |
plot-psd.py
|
patrickmelix/RASPA2-tools
|
127d6f9789e350e660e808c6bd9daf9f4addf194
|
[
"MIT"
] | null | null | null |
plot-psd.py
|
patrickmelix/RASPA2-tools
|
127d6f9789e350e660e808c6bd9daf9f4addf194
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import sys, os
import numpy as np
import matplotlib
from matplotlib import pyplot as plt
# Global plot styling applied before any figure is created.
matplotlib.rcParams['errorbar.capsize'] = 6
matplotlib.rcParams['axes.grid'] = True
matplotlib.rcParams['font.size'] = 18
matplotlib.rcParams['figure.figsize'] = (9.75, 5.85) #(10, 6)
matplotlib.rcParams['savefig.dpi'] = 600
def main(inFile: str, outFile: str) -> None:
    """Plot a RASPA pore-size-distribution file and save it to outFile.

    Columns 0 and 2 of inFile (after skipping 4 header rows) are read as
    pore diameter and PSD value. An existing outFile is renamed to
    outFile + '.bak' before saving.

    Raises:
        ValueError: if inFile does not exist.
    """
    if not os.path.isfile(inFile):
        raise ValueError('File {:} does not exist'.format(str(inFile)))
    #if output exists mv to .bak
    if os.path.isfile(outFile):
        print('ATTENTION: {:} exists, moving to *.bak'.format(outFile))
        os.rename(outFile, outFile+'.bak')
    x, y = np.loadtxt(inFile, skiprows=4, usecols=(0,2), unpack=True)
    plt.xlabel("Pore Diameter [Å]")
    plt.ylabel("Pore-Size Distribution")
    # Trim the x-axis to the data range (matplotlib would otherwise pad it).
    plt.xlim([min(x),max(x)])
    plt.plot(x,y)
    plt.tight_layout()
    plt.savefig(outFile)
if __name__ == "__main__":
    # CLI: plot-psd.py <input> <output>
    import argparse
    parser = argparse.ArgumentParser(description='Plot RASPA Pore-Size Distribution')
    parser.add_argument('input', type=str, help='RASPA PSD Output File')
    parser.add_argument('output', type=str, help='Filename for the PNG Output')
    args = parser.parse_args()
    main(args.input,args.output)
| 32.025641
| 85
| 0.685348
|
0df4d298d4cc0d4eccc77f7f03f0ea16195c6710
| 795
|
rb
|
Ruby
|
samples/Capture/CnpForceCaptureTransaction.rb
|
Vantiv/cnp-sdk-for-ruby
|
53375ca384931550727a8171a28af071741516d6
|
[
"MIT"
] | 1
|
2019-01-25T19:15:26.000Z
|
2019-01-25T19:15:26.000Z
|
samples/Capture/CnpForceCaptureTransaction.rb
|
Vantiv/cnp-sdk-for-ruby
|
53375ca384931550727a8171a28af071741516d6
|
[
"MIT"
] | 1
|
2018-04-05T14:08:02.000Z
|
2018-04-05T14:24:54.000Z
|
samples/Capture/CnpForceCaptureTransaction.rb
|
Vantiv/cnp-sdk-for-ruby
|
53375ca384931550727a8171a28af071741516d6
|
[
"MIT"
] | 4
|
2018-04-05T14:11:16.000Z
|
2019-04-28T19:52:57.000Z
|
require_relative '../../lib/CnpOnline'
# Force Capture sample: submits a force-capture transaction through the
# Vantiv/CnpOnline SDK and prints the gateway's response fields.
force_capture_info = {
  'merchantId' => '101',
  'id'=>'test',
  'version'=>'8.8',
  'reportGroup'=>'Planets',
  'cnpTxnId'=>'123456',
  'orderId'=>'12344',
  'amount'=>'106',          # amount in minor units (cents)
  'orderSource'=>'ecommerce',
  'card'=>{
    'type'=>'VI',
    'number' =>'4100000000000001',  # Vantiv sandbox test card
    'expDate' =>'1210'
  }
}
# Sends the request over the network to the CnpOnline endpoint.
response= CnpOnline::CnpOnlineRequest.new.force_capture(force_capture_info)
# display results
puts "Response: " + response.forceCaptureResponse.response
puts "Message: " + response.forceCaptureResponse.message
puts "Cnp Transaction ID: " + response.forceCaptureResponse.cnpTxnId
# Sample fails loudly when the gateway did not acknowledge the transaction.
if (!response.forceCaptureResponse.message.eql?'Transaction Received')
  raise ArgumentError, "CnpForceCaptureTransaction has not been Approved", caller
end
| 29.444444
| 82
| 0.708176
|
a1537fa6c12c3a25814183f26781b039e255e1c3
| 800
|
ts
|
TypeScript
|
projects/angular-walkthrough/src/lib/angular-walkthrough.module.ts
|
hkjeffchan/ng-walkthrough
|
32374c9761e8c2cae77d1ef16f222e1feb8b6eb5
|
[
"MIT"
] | 17
|
2018-08-07T12:34:25.000Z
|
2021-12-08T20:32:10.000Z
|
projects/angular-walkthrough/src/lib/angular-walkthrough.module.ts
|
hkjeffchan/ng-walkthrough
|
32374c9761e8c2cae77d1ef16f222e1feb8b6eb5
|
[
"MIT"
] | 20
|
2018-08-07T12:35:27.000Z
|
2022-02-25T12:26:00.000Z
|
projects/angular-walkthrough/src/lib/angular-walkthrough.module.ts
|
hkjeffchan/ng-walkthrough
|
32374c9761e8c2cae77d1ef16f222e1feb8b6eb5
|
[
"MIT"
] | 11
|
2018-08-13T00:27:54.000Z
|
2022-02-23T17:28:03.000Z
|
import { PortalModule } from '@angular/cdk/portal';
import { CommonModule } from '@angular/common';
import { NgModule } from '@angular/core';
import { WalkthroughContainerComponent } from './walkthrough-container.component';
import { WalkthroughFlowComponent } from './walkthrough-flow.component';
import { WalkthroughComponent } from './walkthrough.component';
import { WalkthroughService } from './walkthrough.service';
/**
 * Angular module bundling the walkthrough components and their service.
 */
@NgModule({
    imports: [CommonModule, PortalModule],
    declarations: [WalkthroughFlowComponent, WalkthroughComponent, WalkthroughContainerComponent],
    exports: [WalkthroughFlowComponent, WalkthroughComponent, WalkthroughContainerComponent],
    // Needed for dynamic instantiation of the container in pre-Ivy Angular;
    // NOTE(review): entryComponents is ignored/removed in Angular 9+ — confirm
    // the supported Angular range before cleaning this up.
    entryComponents: [WalkthroughContainerComponent],
    providers: [WalkthroughService],
})
export class WalkthroughModule {}
| 44.444444
| 98
| 0.78375
|
ab7f7c23a0805218b2de9cc32b1074d7c5ff1fd3
| 999
|
rb
|
Ruby
|
spec/unit/parser_spec.rb
|
blelump/stats_whisper
|
e3621cc01c24e03824c51618aa6eade18fcb3db6
|
[
"MIT"
] | null | null | null |
spec/unit/parser_spec.rb
|
blelump/stats_whisper
|
e3621cc01c24e03824c51618aa6eade18fcb3db6
|
[
"MIT"
] | null | null | null |
spec/unit/parser_spec.rb
|
blelump/stats_whisper
|
e3621cc01c24e03824c51618aa6eade18fcb3db6
|
[
"MIT"
] | null | null | null |
require 'spec_helper'
require 'stats_whisper/parser'
# Unit specs for StatsWhisper::Parser, which maps request paths to
# Graphite-friendly metric key segments.
describe StatsWhisper::Parser do
  # Anonymous object extending the module under test.
  let(:ctxt) { Class.new { extend StatsWhisper::Parser } }
  describe "#parse" do
    context "when req path is empty" do
      it "resolves base uri" do
        path = "/"
        expect(ctxt.parse(path)).to eq("home_page")
      end
    end
    context "when req path is root path" do
      # A bare locale segment still counts as the home page.
      it "resolves root path" do
        path = "/en"
        expect(ctxt.parse(path)).to eq("home_page")
      end
    end
    context "when req path points to a resource" do
      # Locale (and year-range) prefixes are stripped; remaining segments
      # are joined with dots.
      it "resolves resource path" do
        path = "/en/dashboard"
        expect(ctxt.parse(path)).to eq("dashboard")
        path = "/2015-2016/pl/treasuries/academy_units/offer"
        expect(ctxt.parse(path)).to eq("treasuries.academy_units.offer")
      end
    end
  end
  describe "#build_key" do
    it "generates valid Graphite key" do
      expect(ctxt.build_key("bleh", 'blah', "dash")).to eq("bleh.blah.dash")
    end
  end
end
| 24.365854
| 76
| 0.622623
|
2576f7915e7de3f3ed68c0a2ae7d77a62a04e6f4
| 895
|
js
|
JavaScript
|
Examples/plugin.js
|
dubisdev/cerebro-command-router
|
b8cad9121146391e503e5667962be90ed7d91101
|
[
"MIT"
] | null | null | null |
Examples/plugin.js
|
dubisdev/cerebro-command-router
|
b8cad9121146391e503e5667962be90ed7d91101
|
[
"MIT"
] | 2
|
2021-08-13T10:54:51.000Z
|
2021-08-17T11:15:27.000Z
|
Examples/plugin.js
|
dubisdev/cerebro-command-router
|
b8cad9121146391e503e5667962be90ed7d91101
|
[
"MIT"
] | null | null | null |
import icon from "./icons";
import CerebroRouter from "cerebro-command-router";
// Ask for notification permission once at load time (used by onSelect below).
if (!Notification.permission) Notification.requestPermission();
// Cerebro plugin entry point: routes "example <subcommand>" queries through
// CerebroRouter, rendering a JSX preview per subcommand.
function fn({ term, display }) {
  const myRouter = new CerebroRouter({ command: "example", term, display });
  myRouter.route("com1", {
    icon: icon,
    title: `Example Plugin command 1`,
    getPreview: () => <h2>This is command 1 :)</h2>,
    onSelect: () => new Notification("You selected subcommand 1"),
  });
  // com2 has no onSelect handler: selecting it does nothing.
  myRouter.route("com2", {
    icon: icon,
    title: `Example Plugin command 2`,
    getPreview: () => <h2>This is command 2 :)</h2>,
  });
  // Fallback entry shown when the subcommand matches no route.
  myRouter.invalidRoute({
    icon: icon,
    title: `Invalid Example Command`,
  });
}
// ----------------- Plugin settings --------------------- //
const name = "Example Plugin";
const keyword = "example";
// ----------------- END Plugin settings --------------------- //
export { icon, name, keyword, fn };
| 26.323529
| 75
| 0.605587
|
d24484f4eda9a312d7fd4f64f7980bb0ec0d8c23
| 463
|
rs
|
Rust
|
tests/collect.rs
|
tronta/async-std
|
c9294b5657996ffab30528825e978d7505096b6e
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
tests/collect.rs
|
tronta/async-std
|
c9294b5657996ffab30528825e978d7505096b6e
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
tests/collect.rs
|
tronta/async-std
|
c9294b5657996ffab30528825e978d7505096b6e
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
#[cfg(feature = "unstable")]
#[test]
fn test_send() -> async_std::io::Result<()> {
use async_std::prelude::*;
use async_std::{stream, task};
task::block_on(async {
fn test_send_trait<T: Send>(_: &T) {}
let stream = stream::repeat(1u8).take(10);
test_send_trait(&stream);
let fut = stream.collect::<Vec<_>>();
// This line triggers a compilation error
test_send_trait(&fut);
Ok(())
})
}
| 22.047619
| 50
| 0.559395
|
d62e7cbb8025a94c90bce292c3fe367ae7759c62
| 3,699
|
cs
|
C#
|
Assets/Scripts/GamePlay/ObjectGame/DefuseManager.cs
|
sergey-cs/SwipeTo
|
3df43489e6c539b7c3e401865406ca6011a9c8e2
|
[
"MIT"
] | 8
|
2018-09-20T20:13:04.000Z
|
2021-08-18T01:35:12.000Z
|
Assets/Scripts/GamePlay/ObjectGame/DefuseManager.cs
|
sergey-cs/SwipeTo
|
3df43489e6c539b7c3e401865406ca6011a9c8e2
|
[
"MIT"
] | null | null | null |
Assets/Scripts/GamePlay/ObjectGame/DefuseManager.cs
|
sergey-cs/SwipeTo
|
3df43489e6c539b7c3e401865406ca6011a9c8e2
|
[
"MIT"
] | null | null | null |
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class DefuseManager : MonoBehaviour
{
public float MinValueDefuse = 0.33f;
public float MaxValueDefuse = 1;
// public UnityEngine.Events.UnityEvent OnIntialize;
[SerializeField] private bool SendEventChange = false;
[SerializeField] private UnityEngine.Events.UnityEvent ChangeDefuse;
public event System.Action EventChangeDefuse;
[SerializeField] private bool SendEventFinish = true;
[SerializeField] private UnityEngine.Events.UnityEvent FinishDefuse;
public float DefuseFactor = 0.1f;
public float UndefuseMaxFactor = 2f;
public bool StopUndefuseWhenFullDefuse = true;
private float _DefuseScore, _PreviousScore;
public float UndefuseScore { set; get; }
public static float UndefuseFactor = 3.5f;
public float IntervalDefuse
{
get
{
return MaxValueDefuse - MinValueDefuse;
}
}
// return scoped value at 0..1
public float ScopeDefuse
{
set
{
_PreviousScore = _DefuseScore;
_DefuseScore = value * IntervalDefuse + MinValueDefuse;
if (_DefuseScore != _PreviousScore)
{
if (SendEventChange)
ChangeDefuse.Invoke();
if (EventChangeDefuse != null)
EventChangeDefuse();
if (_DefuseScore == MinValueDefuse)
{
if (SendEventFinish)
FinishDefuse.Invoke();
}
else if (StopUndefuseWhenFullDefuse && _DefuseScore >= MaxValueDefuse)
UndefuseScore = 0;
}
}
get
{
return (_DefuseScore - MinValueDefuse) / IntervalDefuse;
}
}
public float DefuseScore
{
set
{
_PreviousScore = _DefuseScore;
_DefuseScore = Mathf.Clamp(value, MinValueDefuse, MaxValueDefuse);
if (_DefuseScore != _PreviousScore)
{
if (SendEventChange)
ChangeDefuse.Invoke();
if (EventChangeDefuse != null)
EventChangeDefuse();
if (_DefuseScore == MinValueDefuse)
{
if (SendEventFinish)
FinishDefuse.Invoke();
}
else if (StopUndefuseWhenFullDefuse && _DefuseScore >= MaxValueDefuse)
UndefuseScore = 0;
}
}
get { return _DefuseScore; }
}
public void Undefuse(float value)
{
UndefuseScore += value;
if (UndefuseScore > DefuseFactor * UndefuseMaxFactor)
UndefuseScore = DefuseFactor * UndefuseMaxFactor;
}
public void UndefuseMax()
{
UndefuseScore = MaxValueDefuse * 1.5f;
}
public bool ActiveDefuse
{
set { enabled = value; }
get { return enabled; }
}
private float _diff;
public bool Defusing = true;
/// <summary>
/// Update is called every frame, if the MonoBehaviour is enabled.
/// </summary>
void Update()
{
if (Defusing)
{
_diff = UndefuseScore * Time.deltaTime * UndefuseFactor;
UndefuseScore -= _diff;
DefuseScore += _diff - (DefuseFactor * Time.deltaTime);
}
}
private bool _initialized;
public void Initialize()
{
if (_initialized) return;
_initialized = true;
//Initialize logic
// OnIntialize.Invoke();
}
void Awake()
{
Initialize();
}
}
| 26.234043
| 86
| 0.560962
|
b91d2b2c3c190d973064f397b89b8bd6ba112e77
| 1,800
|
css
|
CSS
|
src/main/resources/static/css/homepage.css
|
Bigdog93/WACHACHA
|
0c40fe0743eeef787954969f13f405fcf9195b5a
|
[
"MIT"
] | 2
|
2021-07-27T03:11:26.000Z
|
2021-08-21T14:47:43.000Z
|
src/main/resources/static/css/homepage.css
|
Bigdog93/WACHACHA
|
0c40fe0743eeef787954969f13f405fcf9195b5a
|
[
"MIT"
] | 12
|
2021-07-26T07:47:51.000Z
|
2021-08-28T05:33:27.000Z
|
src/main/resources/static/css/homepage.css
|
Bigdog93/WACHACHA
|
0c40fe0743eeef787954969f13f405fcf9195b5a
|
[
"MIT"
] | 1
|
2021-07-23T00:42:47.000Z
|
2021-07-23T00:42:47.000Z
|
.section__1{background-image: linear-gradient( rgba(0, 0, 0, 0.3), rgba(0, 0, 0, 0.3) ), url("../img/home/방탄소년단.png"); width: 100%; height: 100vh}
.section__2{background-image: linear-gradient( rgba(0, 0, 0, 0.3), rgba(0, 0, 0, 0.3) ),url("../img/home/승리호.png"); width: 100%; height: 100vh}
.section__3{background-image: linear-gradient( rgba(0, 0, 0, 0.3), rgba(0, 0, 0, 0.3) ),url("../img/home/이태원클라스 배경1.png"); width: 100%; height: 100vh}
.section__4{background-image: linear-gradient( rgba(0, 0, 0, 0.3), rgba(0, 0, 0, 0.3) ),url("../img/home/킹덤.png"); width: 100%; height: 100vh}
a{text-decoration: none;}
a:hover{color: black;}
.box{
height: 100vh;
width: 100%;
min-width: 872px;
display: flex;
background-size: cover;
background-position: center;
}
.inner section{
height: 800px;
display: flex;
}
.inner{
width: 100%;
text-align: center;
display: inline-block;
position: absolute;
margin-top: 343px;
}
.inner h2{
margin-top: 20px;
margin-bottom: 20px;
font-family: Noto Sans;
font-style: normal;
font-weight: bold;
font-size: 35.69px;
line-height: 49px;
color: #FFFFFF;
}
.inner h4{
margin-top: 20px;
margin-bottom: 39px;
font-family: Noto Sans;
font-style: normal;
font-weight: bold;
font-size: 17.7px;
line-height: 24px;
color: rgba(255, 255, 255, 0.8);
}
.inner a{
color: #fff;
background-color: #f82f62;
border-radius: 72px;
width: 168px;
margin-top: 100px;
padding: 10px 16px;
}
.inner content{
align-items: center;
}
.toggle{
display: flex;
justify-content: center;
align-items: flex-end;
width: 100%;
height: 100vh;
}
.fas{ color: white; font-size: 40px; margin-bottom: 30px; opacity: 0.8}
.fas:hover{cursor: pointer}
| 25.714286
| 150
| 0.623889
|
b03f88e16c879a83208558d3fc39b1f8ba23e0d7
| 1,260
|
py
|
Python
|
src/Crypto/Cesar/cesarToTxtClass.py
|
edouard-lebas/CryptoGenerator
|
d43f2b3fa472493f49015e8402e382681692a63c
|
[
"MIT"
] | null | null | null |
src/Crypto/Cesar/cesarToTxtClass.py
|
edouard-lebas/CryptoGenerator
|
d43f2b3fa472493f49015e8402e382681692a63c
|
[
"MIT"
] | null | null | null |
src/Crypto/Cesar/cesarToTxtClass.py
|
edouard-lebas/CryptoGenerator
|
d43f2b3fa472493f49015e8402e382681692a63c
|
[
"MIT"
] | null | null | null |
class cesarToTxtClass:
def __init__(self, cesar, offset=None):
self.cesar = cesar.decode("utf-8").upper()
self.alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ".decode("utf-8")
if offset != None:
self.offset = offset
def processCesarWithOffset(self):
decrypted = ""
for c in self.cesar:
if c in self.alphabet:
index = self.alphabet.find(c)
index = index - self.offset
if index < 0:
index = index + len(self.alphabet)
decrypted += self.alphabet[index]
else:
decrypted += self.alphabet[index]
return decrypted
def processCesarWithoutOffset(self):
decrypted = []
for a in range(1,len(self.alphabet)+1):
word = ""
for c in self.cesar:
if c in self.alphabet:
index = self.alphabet.find(c)
index = index - a
if index < 0:
index = index + len(self.alphabet)
word += self.alphabet[index]
else:
word += self.alphabet[index]
decrypted.append(word)
return decrypted
| 34.054054
| 68
| 0.493651
|
5bda600ee1077397f6be21813c90bc6663f15223
| 904
|
css
|
CSS
|
1_basic/todo.css
|
Zanyangwang/cluster
|
6ed312ecfa93f3477ea29a8433ff8fe9331aa687
|
[
"MIT"
] | null | null | null |
1_basic/todo.css
|
Zanyangwang/cluster
|
6ed312ecfa93f3477ea29a8433ff8fe9331aa687
|
[
"MIT"
] | null | null | null |
1_basic/todo.css
|
Zanyangwang/cluster
|
6ed312ecfa93f3477ea29a8433ff8fe9331aa687
|
[
"MIT"
] | null | null | null |
/* 基本スタイル */
h1{
font-size: 32px;
margin: 0px;
padding: 4px;
color: #222288;
}
hr{
margin-top: 8px;
margin-bottom: 8px;
}
/* ページ固有スタイル */
th.login_field{
width: 150px;
text-align: left;
padding: 8px;
color: #ffffff;
background-color: #aaaaff;
}
td.login_field{
padding: 8px;
}
td.login_button{
text-align: center;
padding: 8px;
}
table.toolbar {
margin-top: 16px;
margin-bottom: 32px;
}
table.list th {
width: 150px;
text-align: left;
padding: 8px;
color: #ffffff;
background-color: #5555aa;
}
table.list td {
background-color: #bbbbff;
padding: 8px;
}
th.add_field {
width: 150px;
text-align: left;
padding: 8px;
color: #ffffff;
background-color: #aaaaff;
}
td.add_field {
padding: 8px;
}
td.add_button {
text-align: center;
padding: 8px;
}
td.error{
color: red;
}
| 13.101449
| 30
| 0.59292
|
cc5d872ec825b2bf089180df1fe86269d5ff9680
| 1,860
|
rb
|
Ruby
|
lib/paradeiser/models/repository.rb
|
nerab/paradeiser
|
a89f9e7ab00b0ef760fdd97e3735ecca34bb9378
|
[
"MIT"
] | 1
|
2015-09-02T18:39:28.000Z
|
2015-09-02T18:39:28.000Z
|
lib/paradeiser/models/repository.rb
|
nerab/paradeiser
|
a89f9e7ab00b0ef760fdd97e3735ecca34bb9378
|
[
"MIT"
] | 3
|
2017-10-23T13:07:57.000Z
|
2019-01-01T02:13:52.000Z
|
lib/paradeiser/models/repository.rb
|
nerab/paradeiser
|
a89f9e7ab00b0ef760fdd97e3735ecca34bb9378
|
[
"MIT"
] | 1
|
2015-09-30T17:07:08.000Z
|
2015-09-30T17:07:08.000Z
|
module Paradeiser
class Repository
class << self
def all
backend.transaction(true) do
backend.roots.map{|id| backend[id]}
end
end
def all_pomodori
all.select{|p| p.kind_of?(Pomodoro)}
end
def all_breaks
all.select{|b| b.kind_of?(Break)}
end
def any?(&blk)
all.any?(&blk)
end
def find(&blk)
all.select(&blk)
end
def active
all_active = find{|pom| pom.active?}.sort{|a,b| a.started_at <=> b.started_at}
# Cannot recover from an internal inconsistency.
if all_active.size > 1
raise "The repository was corrupted. There are #{all_active.size} active objects, but only one is allowed to be active."
end
all_active.last
end
def active?
!!active
end
def last_finished
find{|p| p.finished?}.sort{|a,b| a.started_at <=> b.started_at}.last
end
def last_canceled
find{|p| p.canceled?}.sort{|a,b| a.started_at <=> b.started_at}.last
end
def save(pom)
raise IllegalStatusError if pom.idle?
# Do not allow saving of a new active pomodoro while another pomodoro or break is active
raise SingletonError.new(pom.class, self.active, :save) if self.active? && pom.new?
pom.id = next_id if pom.new?
backend.transaction do
backend[pom.id] = pom
end
end
def next_id
if all.empty?
1
else
all.max{|a, b| a.id <=> b.id}.id.next
end
end
private
def backend
begin
@backend ||= PStore.new(File.join(Paradeiser.par_dir, 'repository.pstore'), true)
rescue PStore::Error => e
raise NotInitializedError.new(e.message)
end
end
end
end
end
| 22.962963
| 130
| 0.565054
|
5fecb2574971f6e14279be3635a3522d4f741d51
| 270
|
rb
|
Ruby
|
spec/models/person_spec.rb
|
matthijsgroen/game-of-geese
|
cf32edbc1cd8eb5523ba29eb0e945fa6735fe5a4
|
[
"MIT"
] | null | null | null |
spec/models/person_spec.rb
|
matthijsgroen/game-of-geese
|
cf32edbc1cd8eb5523ba29eb0e945fa6735fe5a4
|
[
"MIT"
] | null | null | null |
spec/models/person_spec.rb
|
matthijsgroen/game-of-geese
|
cf32edbc1cd8eb5523ba29eb0e945fa6735fe5a4
|
[
"MIT"
] | null | null | null |
require 'spec_helper'
describe Person do
describe 'attributes' do
it 'has a name' do
p = Person.new name: 'Henk'
expect(p.name).to eql 'Henk'
end
it 'has an age' do
p = Person.new age: 34
expect(p.age).to eql 34
end
end
end
| 16.875
| 34
| 0.592593
|
591f8cdb994747868668304a1ff5b68df0ab8e4d
| 999
|
sql
|
SQL
|
JobExecutionFramework/SSISDB/catalog/Views/explicit_object_permissions.sql
|
MS-BI/JobExceutionFramework
|
276fac1f0ba37cf2368c96f7d7fd0c0df8f1d2f7
|
[
"MIT"
] | 3
|
2016-08-29T09:41:38.000Z
|
2016-11-08T15:37:44.000Z
|
JobExecutionFramework/SSISDB/catalog/Views/explicit_object_permissions.sql
|
MS-BI/JobExecutionFramework
|
276fac1f0ba37cf2368c96f7d7fd0c0df8f1d2f7
|
[
"MIT"
] | null | null | null |
JobExecutionFramework/SSISDB/catalog/Views/explicit_object_permissions.sql
|
MS-BI/JobExecutionFramework
|
276fac1f0ba37cf2368c96f7d7fd0c0df8f1d2f7
|
[
"MIT"
] | null | null | null |
CREATE VIEW [catalog].[explicit_object_permissions]
AS
SELECT op.[object_type],
op.[object_id],
[internal].[get_principal_id_by_sid](op.[sid]) as [principal_id],
op.[permission_type],
op.[is_deny],
[internal].[get_principal_id_by_sid](op.[grantor_sid]) as [grantor_id]
FROM [internal].[object_permissions] op
INNER JOIN [catalog].[effective_object_permissions] eop
ON op.[object_type] = eop.[object_type]
AND op.[object_id] = eop.[object_id]
AND eop.[permission_type] = 1
UNION
SELECT op.[object_type],
op.[object_id],
[internal].[get_principal_id_by_sid](op.[sid]) as [principal_id],
op.[permission_type],
op.[is_deny],
[internal].[get_principal_id_by_sid](op.[grantor_sid]) as [grantor_id]
FROM [internal].[object_permissions] op
WHERE IS_MEMBER('ssis_admin') = 1
OR IS_SRVROLEMEMBER('sysadmin') = 1
| 37
| 81
| 0.615616
|
02a8b5e456ab04346a8605f66c1ebbd3ae9c3fef
| 4,411
|
cpp
|
C++
|
ContourTiler/CloseContourRanker.cpp
|
GuMiner/agow
|
b8665d5879f43a6bcb6e878827b3b25af9cc1b1d
|
[
"MIT"
] | null | null | null |
ContourTiler/CloseContourRanker.cpp
|
GuMiner/agow
|
b8665d5879f43a6bcb6e878827b3b25af9cc1b1d
|
[
"MIT"
] | null | null | null |
ContourTiler/CloseContourRanker.cpp
|
GuMiner/agow
|
b8665d5879f43a6bcb6e878827b3b25af9cc1b1d
|
[
"MIT"
] | null | null | null |
#include "CloseContourRanker.h"
CloseContourRanker::CloseContourRanker()
: closestLine(CloseContourLine()), secondClosestLine(CloseContourLine()), thirdClosestLine(CloseContourLine())
{
}
bool CloseContourRanker::ResortIfIdentical(CloseContourLine contourLine)
{
if (closestLine.elevationId == contourLine.elevationId)
{
// Identical, resorting done.
if (contourLine.distanceSqd < closestLine.distanceSqd)
{
closestLine.distanceSqd = contourLine.distanceSqd;
}
return true;
}
else if (secondClosestLine.elevationId == contourLine.elevationId)
{
if (contourLine.distanceSqd < secondClosestLine.distanceSqd)
{
secondClosestLine.distanceSqd = contourLine.distanceSqd;
if (secondClosestLine.distanceSqd < closestLine.distanceSqd)
{
// Swap
CloseContourLine other;
other.CopyFrom(closestLine);
closestLine.CopyFrom(secondClosestLine);
secondClosestLine.CopyFrom(other);
}
}
return true;
}
else if (thirdClosestLine.elevationId == contourLine.elevationId)
{
if (contourLine.distanceSqd < thirdClosestLine.distanceSqd)
{
thirdClosestLine.distanceSqd = contourLine.distanceSqd;
if (thirdClosestLine.distanceSqd < closestLine.distanceSqd)
{
// Swap and move down #2
CloseContourLine other;
other.CopyFrom(closestLine);
closestLine.CopyFrom(thirdClosestLine);
thirdClosestLine.CopyFrom(secondClosestLine);
secondClosestLine.CopyFrom(other);
}
else if (thirdClosestLine.distanceSqd < secondClosestLine.distanceSqd)
{
CloseContourLine other;
other.CopyFrom(secondClosestLine);
secondClosestLine.CopyFrom(thirdClosestLine);
thirdClosestLine.CopyFrom(other);
}
}
return true;
}
return false;
}
void CloseContourRanker::AddElevationToRank(const CloseContourLine& contourLine)
{
if (ResortIfIdentical(contourLine))
{
return;
}
// Not identical, figure out if it we need to insert this contour line anywhere.
// Handle each contour sequentially, filling it in automatically if empty.
if (closestLine.elevationId == -1)
{
closestLine.CopyFrom(contourLine);
return;
}
else if (contourLine.distanceSqd < closestLine.distanceSqd)
{
// Move all down.
thirdClosestLine.CopyFrom(secondClosestLine);
secondClosestLine.CopyFrom(closestLine);
closestLine.CopyFrom(contourLine);
return;
}
if (secondClosestLine.elevationId == -1)
{
secondClosestLine.CopyFrom(contourLine);
return;
}
else if (contourLine.distanceSqd < secondClosestLine.distanceSqd)
{
// Move second and third down.
thirdClosestLine.CopyFrom(secondClosestLine);
secondClosestLine.CopyFrom(contourLine);
return;
}
if (thirdClosestLine.elevationId == -1 || contourLine.distanceSqd < thirdClosestLine.distanceSqd)
{
thirdClosestLine.CopyFrom(contourLine);
return;
}
}
bool CloseContourRanker::FilledSufficientLines() const
{
return closestLine.elevationId != -1 && secondClosestLine.elevationId != -1; // && thirdClosestLine.elevationId != -1;
}
decimal CloseContourRanker::GetWeightedElevation() const
{
// We're guaranteed to have something in the closest line, but nothing in the other two.
decimal elevation = 0;
decimal inverseWeights = 0;
// Double the sqrt for a less drastic flow.
decimal distCL = closestLine.distanceSqd;
elevation += closestLine.elevation / distCL;
inverseWeights += (decimal)1.0 / distCL;
if (secondClosestLine.elevationId != -1)
{
distCL = secondClosestLine.distanceSqd;
elevation += secondClosestLine.elevation / distCL;
inverseWeights += (decimal)1.0 / distCL;
}
if (thirdClosestLine.elevationId != -1)
{
distCL = thirdClosestLine.distanceSqd;
elevation += thirdClosestLine.elevation / distCL;
inverseWeights += (decimal)1.0 / distCL;
}
return elevation / inverseWeights;
}
| 30.631944
| 122
| 0.642938
|
e21caccad2987f358bc4684dfe0e79cc8a9f7b18
| 3,455
|
py
|
Python
|
modules/alerts/MysqlStore.py
|
crashdump/iatt
|
e7515587d891858686ed3b4790479392a52eb712
|
[
"BSD-3-Clause"
] | null | null | null |
modules/alerts/MysqlStore.py
|
crashdump/iatt
|
e7515587d891858686ed3b4790479392a52eb712
|
[
"BSD-3-Clause"
] | null | null | null |
modules/alerts/MysqlStore.py
|
crashdump/iatt
|
e7515587d891858686ed3b4790479392a52eb712
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
CREATE TABLE `arpevents` (
`datetime` DATETIME NOT NULL,
`mac` CHAR(18) NOT NULL,
`ip` CHAR(20) NOT NULL,
`lasteventname` MEDIUMTEXT NOT NULL,
`actionsoutput` MEDIUMTEXT NOT NULL,
PRIMARY KEY (`mac`)
)
ENGINE=InnoDB;
"""
import mysql.connector
con = None
def alert(mac, ip, datetime, last_event_name, actionsoutput):
try:
aoutput = ''
for k, v in actionsoutput.items():
aoutput = aoutput + '<action name="%s">%s</action>\r\n' % (k, v)
con = mysql.connector.connect(
host = "localhost",
user = "mysql",
password = "",
database = "iatt"
)
cur = con.cursor()
q = """INSERT INTO arpevents (datetime, mac, ip, lasteventname, actionsoutput) VALUES ('%s','%s','%s', '%s', '%s') ON DUPLICATE KEY UPDATE mac = '%s'""" % \
(datetime, mac, ip, con.escape_string(last_event_name), con.escape_string(aoutput), mac)
cur.execute(q)
# data = cur.fetchone()
con.commit()
return "Pushed to the db."
except mysql.connector.Error as e:
print("Error %d: %s" % (e.args[0],e.args[1]))
return "Error while inserting in db"
finally:
if con:
con.close()
return "Pushed to the db."
"""
You can use this php code to display the data in a browser:
<html>
<head>
<title>Mac Map</title>
<style type="text/css">
table.db-table { border-right:1px solid #ccc; border-bottom:1px solid #ccc; }
table.db-table th{ background:#eee; padding:5px; border-left:1px solid #ccc; border-top:1px solid #ccc; }
table.db-table td{ padding:5px; border-left:1px solid #ccc; border-top:1px solid #ccc; }
</style>
</head>
<body>
<?php
$mysqli = new mysqli('localhost','arpaction','_youramazingpasswordhere_','arpaction');
/* check connection */
if (mysqli_connect_errno()) { printf("Connect failed: %s\n", mysqli_connect_error()); exit(); }
$query = "SELECT `datetime`, `mac`, `ip`, `lasteventname`, `actionsoutput` FROM `arpaction`.`arpevents` ORDER BY `ip` ASC LIMIT 500;";
if ($result = $mysqli->query($query)) {
print '<table cellpadding="0" cellspacing="0" class="db-table">'.PHP_EOL;
print '<tr><th>Datetime</th><th>MAC</th><th>IP</th><th>Last Event</th><th>Hostname</th><th>Nmap</th></tr>'.PHP_EOL;
/* fetch associative array */
while ($row = $result->fetch_assoc()) {
$string = preg_match_all('#^\<action name="(.*)"\>(.*)\</action\>#msU', $row["actionsoutput"], $actionsoutput);
$action_modules_titles = $actionsoutput[1];
$action_modules_results = $actionsoutput[2];
print '<tr>'.PHP_EOL;
print '<td>'.$row["datetime"].'</td>'.PHP_EOL;
print '<td>'.$row["mac"].'</td>'.PHP_EOL;
print '<td>'.$row["ip"].'</td>'.PHP_EOL;
print '<td>'.substr($row["lasteventname"], 0, 10).'</td>'.PHP_EOL;
print '<td>'.$action_modules_results[0].'</td>'.PHP_EOL;
print '<td><span title="'.htmlspecialchars($action_modules_results[1]).'">'.substr($action_modules_results[1], 0, 64).'</span></td>'.PHP_EOL;
print '</tr>'.PHP_EOL;
}
print '</table><br />'.PHP_EOL;
/* free result set */
$result->free();
}
/* close connection */
$mysqli->close();
?>
</body>
</html>
"""
# vim: noai:ts=4:sw=4
| 34.89899
| 164
| 0.576845
|
a3ba52f6bac2e34cde028da6b220dd2724a0b5c5
| 4,101
|
java
|
Java
|
libraries/util/src/test/java/com/paritytrading/parity/util/InstrumentsTest.java
|
zzwlstarby/parity
|
437cad8f3348617142dd312e4afd9f4054658cf8
|
[
"Apache-2.0"
] | 1
|
2020-11-07T19:41:25.000Z
|
2020-11-07T19:41:25.000Z
|
libraries/util/src/test/java/com/paritytrading/parity/util/InstrumentsTest.java
|
zzwlstarby/parity
|
437cad8f3348617142dd312e4afd9f4054658cf8
|
[
"Apache-2.0"
] | 4
|
2020-06-21T13:54:25.000Z
|
2020-07-07T15:50:08.000Z
|
libraries/util/src/test/java/com/paritytrading/parity/util/InstrumentsTest.java
|
zzwlstarby/parity
|
437cad8f3348617142dd312e4afd9f4054658cf8
|
[
"Apache-2.0"
] | null | null | null |
/*
* Copyright 2014 Parity authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.paritytrading.parity.util;
import static org.junit.Assert.*;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.junit.Test;
public class InstrumentsTest {
private static final Instruments FRACTIONS = fromString("" +
"instruments = {\n" +
" price-integer-digits = 4\n" +
" size-integer-digits = 8\n" +
" FOO {\n" +
" price-fraction-digits = 2\n" +
" size-fraction-digits = 0\n" +
" }\n" +
" BAR {\n" +
" price-fraction-digits = 6\n" +
" size-fraction-digits = 8\n" +
" }\n" +
"}");
private static final Instruments INTEGERS = fromString("" +
"instruments = {\n" +
" price-integer-digits = 4\n" +
" size-integer-digits = 8\n" +
" FOO {\n" +
" price-fraction-digits = 2\n" +
" size-fraction-digits = 0\n" +
" }\n" +
"}");
@Test
public void maxPriceFractionDigits() {
assertEquals(6, FRACTIONS.getMaxPriceFractionDigits());
}
@Test
public void maxSizeFractionDigits() {
assertEquals(8, FRACTIONS.getMaxSizeFractionDigits());
}
@Test
public void pricePlaceholder() {
assertEquals(" - ", FRACTIONS.getPricePlaceholder());
}
@Test
public void sizePlaceholder() {
assertEquals(" - ", FRACTIONS.getSizePlaceholder());
}
@Test
public void priceWidth() {
assertEquals(11, FRACTIONS.getPriceWidth());
}
@Test
public void sizeWidth() {
assertEquals(17, FRACTIONS.getSizeWidth());
}
@Test
public void priceFractionDigits() {
assertEquals(2, FRACTIONS.get("FOO").getPriceFractionDigits());
}
@Test
public void sizeFractionDigitsWithIntegers() {
assertEquals(0, FRACTIONS.get("FOO").getSizeFractionDigits());
}
@Test
public void sizeFractionDigitsWithFractions() {
assertEquals(8, FRACTIONS.get("BAR").getSizeFractionDigits());
}
@Test
public void priceFormat() {
assertEquals("%7.2f ", FRACTIONS.get("FOO").getPriceFormat());
}
@Test
public void sizeFormatWithIntegers() {
assertEquals("%8.0f ", FRACTIONS.get("FOO").getSizeFormat());
}
@Test
public void sizeFormatWithFractions() {
assertEquals("%17.8f", FRACTIONS.get("BAR").getSizeFormat());
}
@Test
public void priceFactor() {
assertEquals(100.0, FRACTIONS.get("FOO").getPriceFactor(), 0.0);
}
@Test
public void sizeFactorWithIntegers() {
assertEquals(1.0, FRACTIONS.get("FOO").getSizeFactor(), 0.0);
}
@Test
public void sizeFactorWithFractions() {
assertEquals(100000000.0, FRACTIONS.get("BAR").getSizeFactor(), 0.0);
}
@Test
public void sizeWidthWithIntegersOnly() {
assertEquals(8, INTEGERS.getSizeWidth());
}
@Test
public void sizePlaceholderWithIntegersOnly() {
assertEquals(" -", INTEGERS.getSizePlaceholder());
}
@Test
public void sizeFormatWithIntegersOnly() {
assertEquals("%8.0f", INTEGERS.get("FOO").getSizeFormat());
}
private static Instruments fromString(String s) {
Config config = ConfigFactory.parseString(s);
return Instruments.fromConfig(config, "instruments");
}
}
| 27.897959
| 77
| 0.600829
|
2fbb1d608df09af1e79eb18a2c5740454c56f01c
| 274
|
py
|
Python
|
tests/parts/clock.py
|
ycwn/pyhdl
|
6440cc40193b789e281eb10f12580f2b6df17d1a
|
[
"MIT"
] | null | null | null |
tests/parts/clock.py
|
ycwn/pyhdl
|
6440cc40193b789e281eb10f12580f2b6df17d1a
|
[
"MIT"
] | null | null | null |
tests/parts/clock.py
|
ycwn/pyhdl
|
6440cc40193b789e281eb10f12580f2b6df17d1a
|
[
"MIT"
] | null | null | null |
import pyhdl.core as core
import pyhdl.parts.clock as clk
from ..common import *
test_component(clk.clock.create(),
[
[], [], [], [], [], [], [], []
],
[
[ True ],
[ False ],
[ True ],
[ False ],
[ True ],
[ False ],
[ True ],
[ False ]
]
)
| 10.538462
| 34
| 0.463504
|
8c0a6bfd2bfa7dd1c4b03c843f94c6969c4bf5c5
| 763
|
swift
|
Swift
|
OMKit/Classes/WebView/WebViewAction.swift
|
mlibai/OMKit
|
ca38993e98c02f16e35a7dc6dfeeb2ce31d9fb25
|
[
"MIT"
] | null | null | null |
OMKit/Classes/WebView/WebViewAction.swift
|
mlibai/OMKit
|
ca38993e98c02f16e35a7dc6dfeeb2ce31d9fb25
|
[
"MIT"
] | null | null | null |
OMKit/Classes/WebView/WebViewAction.swift
|
mlibai/OMKit
|
ca38993e98c02f16e35a7dc6dfeeb2ce31d9fb25
|
[
"MIT"
] | null | null | null |
//
// WebViewAction.swift
// OMKit
//
// Created by mlibai on 2017/5/12.
// Copyright © 2017年 mlibai. All rights reserved.
//
import Foundation
extension WebViewAction {
public static let alert: WebViewAction = WebViewAction(rawValue: "omApp.alert")
// taskID, isSuccess, {userToken: }
public static let didFinishLogining: WebViewAction = WebViewAction(rawValue: "omApp.didFinishLogining")
}
/// WebView 行为
public struct WebViewAction: RawRepresentable, CustomStringConvertible {
public typealias RawValue = String
public let rawValue: String
public init(rawValue: String) {
self.rawValue = rawValue
}
public var description: String {
return rawValue
}
}
| 21.8
| 108
| 0.664482
|
a7b0b1d07f74079198004076e45213109d60fa90
| 38,862
|
css
|
CSS
|
explorations/atypi-tech-2021/main.css
|
simoncozens/varla-varfo
|
1d3a655d65ca4fd51f9a745eff028e5594adaa3c
|
[
"Apache-2.0"
] | 11
|
2021-03-17T11:11:09.000Z
|
2022-01-10T17:25:19.000Z
|
explorations/atypi-tech-2021/main.css
|
simoncozens/varla-varfo
|
1d3a655d65ca4fd51f9a745eff028e5594adaa3c
|
[
"Apache-2.0"
] | 17
|
2021-03-01T14:50:29.000Z
|
2021-12-01T12:58:54.000Z
|
explorations/atypi-tech-2021/main.css
|
simoncozens/varla-varfo
|
1d3a655d65ca4fd51f9a745eff028e5594adaa3c
|
[
"Apache-2.0"
] | 3
|
2021-03-08T11:21:31.000Z
|
2021-09-03T15:50:16.000Z
|
:root{
/* can't use this per @font-face rule */
font-synthesis: none;
--fine-user-zoom: 0;
--link-color: #0645ad;
--layout-border-color: #a7d7f9;
}
body {
font-size: calc(100% + 1pt * var(--fine-user-zoom, 0));
}
@font-face {
font-family: 'RobotoFlex';
src: url('./fonts/Roboto-Flex/RobotoFlex[slnt,wdth,wght,opsz].ttf') format('truetype');
font-weight: 100 900;
-xx-font-style: normal;
font-style: oblique 0deg -10deg;
}
@font-face {
font-family: 'AmstelVar';
src: url('./fonts/Amstelvar/Amstelvar-Roman[wdth,wght,opsz].ttf') format('truetype');
font-weight: 100 900;
font-style: normal;
}
@font-face {
font-family: 'AmstelVar';
src: url('./fonts/Amstelvar/Amstelvar-Italic[wdth,wght,opsz].ttf') format('truetype');
font-weight: 100 900;
font-style: italic;
}
/* essential layout rules copied from original wikipedia css*/
body {
margin: 0;
background-color: #f6f6f6;
}
div.thumb {
width: auto;
background-color: transparent;
}
/*
div.tright, div.floatright, table.floatright {
clear: right;
float: right;
}
div.tleft, div.floatleft, table.floatleft {
float: left;
clear: left;
}
div.tright {
margin: 0.5em 0 1.3em 0;
}
div.tleft {
margin: 0.5em 0 1.3em 0;
}
*/
div.thumbinner {
/* FIXME: With this border and padding, the element is not neutral
* to the base line grid, however, we don't control the size of
* the content either in that respect, so being conscious here would
* not *yet* resolve the problem.
padding: 0.1875em;
*/
/* text-align: center; */
border: 0.0625em solid #c8ccd1;
overflow: hidden;
display: block;
}
div.thumbinner > * {
background-color: #f1f2f3;
padding: 0.1875em;
}
div.thumbinner a.image{
display: block
}
.thumbinner {
min-width: 100px;
}
.thumbinner img.thumbimage {
display: block;
margin: auto;
}
.thumbcaption {
border: 0;
padding: 3px;
text-align: left;
}
html body.mediawiki .mbox-small {
clear: right;
/*float: right;*/
margin: 4px 0 4px 1em;
box-sizing: border-box;
width: auto/*238px;*/
font-size: 88%;
line-height: 1.25em;
}
/* navigation */
#mw-navigation h2 {
position: absolute;
top: -9999px;
}
#mw-head {
position: absolute;
top: 0;
right: 0;
width: 100%;
}
#mw-panel {
padding-left: 0.5em;
font-size: inherit;
position: absolute;
top: 0;
width: 10em;
left: 0;
}
#p-logo {
width: 10em;
height: 160px;
margin-bottom: 1em;
}
/* keep body out of the way of navigation */
.mw-body {
padding: 1.25em 1.5em 1.5em 1.5em;
}
.mw-body {
margin-top: -1px;
border: 1px solid var(--layout-border-color);
border-right-width: 1px;
border-right-width: 0;
background-color: var(--background);
}
.mw-body, #mw-data-after-content {
margin-left: 10em;
}
.mw-body, .parsoid-body {
direction: ltr;
padding: 1em;
}
.emptyPortlet {
display: none;
}
.mw-portlet h3 {
display: none;
}
#right-navigation {
float: right;
margin-top: 2.5em;
}
.vector-menu-tabs, .vector-menu-tabs a, #mw-head .vector-menu-dropdown h3 {
background-image: url(/w/skins/Vector/resources/skins.vector.styles/images/tab-separator.png?09d4b);
background-image: linear-gradient(to bottom,rgba(167,215,249,0) 0,var(--layout-border-color) 100%);
background-repeat: no-repeat;
background-size: 1px 100%;
}
.vector-menu-tabs {
background-position: left bottom;
float: left;
height: 2.5em;
padding-left: 1px;
}
vector-menu-tabs h3 {
display: none;
}
#p-personal {
right: 1em;
position: absolute;
top: 6px;
z-index: 100;
}
#mw-head-base {
margin-top: -5em;
height: 5em;
}
#mw-page-base {
background-color: #fbfbfb;
background-image: -webkit-gradient(linear,right top,right bottom,color-stop(50%,#ffffff),color-stop(100%,#f6f6f6));
background-image: -webkit-linear-gradient(top,#ffffff 50%,#f6f6f6 100%);
background-image: -moz-linear-gradient(top,#ffffff 50%,#f6f6f6 100%);
background-image: linear-gradient(to bottom,#ffffff 50%,#f6f6f6 100%);
background-position: bottom left;
height: 5em;
}
#p-logo a {
background-position: center center;
background-repeat: no-repeat;
display: block;
width: 10em;
height: 160px;
text-decoration: none;
}
.mw-wiki-logo {
background-image: url(./static/images/enwiki-2x.png);
background-size: 135px auto;
}
.vector-menu-portal .body ul, .vector-menu-portal .vector-menu-content ul {
list-style: none none;
margin: 0;
padding-top: 0.3em;
}
.vector-menu-portal {
margin: 0 0.6em 0 1.3em;
padding: 0.25em 0;
direction: ltr;
}
.vector-menu-tabs ul {
float: left;
height: 100%;
list-style: none none;
margin: 0;
padding: 0;
}
#p-personal .vector-menu-content-list {
display: flex;
flex-wrap: wrap;
flex-grow: 1;
justify-content: flex-end;
align-items: baseline;
}
#p-personal ul {
padding-left: 10em;
}
.mw-portlet ul {
list-style: none none;
margin: 0;
}
ul, ol {
margin: 0 0 0 1em;
padding: 0;
padding-left: 0px;
}
#toc ul{
list-style: none;
}
a {
text-decoration: none;
color: var(--link-color);
background: none;
}
#p-personal li {
float: left;
}
#pt-anonuserpage {
color: #54595d;
}
#pt-anonuserpage, #pt-userpage a {
background-image: url(./w/skins/Vector/resources/skins.vector.styles/images/user-avatar.svg?b7f58);
background-position: left 0.33333333em;
background-repeat: no-repeat;
background-size: 1.16666667em 1.16666667em;
padding-top: 0.5em !important;
padding-left: 16px !important;
}
.mw-portlet li {
margin-left: 0.75em;
padding-top: 0.5em;
line-height: 1.16666667;
}
.vector-menu-portal .body li, .vector-menu-portal .vector-menu-content li {
margin: 0;
padding: 0.25em 0;
word-wrap: break-word;
}
#left-navigation {
float: left;
margin-left: 10em;
margin-top: 2.5em;
margin-bottom: -2.5em;
}
.mw-body, #mw-head-base, #left-navigation, #mw-data-after-content, .mw-footer {
margin-left: 11em;
}
.vector-menu-tabs, .vector-menu-tabs a, #mw-head .vector-menu-dropdown h3 {
background-image: url(/w/skins/Vector/resources/skins.vector.styles/images/tab-separator.png?09d4b);
background-image: linear-gradient(to bottom,rgba(167,215,249,0) 0,var(--layout-border-color) 100%);
background-repeat: no-repeat;
background-size: 1px 100%;
}
.vector-menu-tabs {
background-position: left bottom;
float: left;
height: 2.5em;
padding-left: 1px;
}
.vector-menu-tabs .selected {
background: #ffffff;
}
.vector-menu-tabs li {
background-image: url(/w/skins/Vector/resources/skins.vector.styles/images/tab-normal-fade.png?1cc52);
background-image: linear-gradient(to top,var(--layout-border-color) 0,#e8f2f8 1px,#ffffff 100%);
background-position: left bottom;
background-repeat: repeat-x;
float: left;
display: block;
height: 100%;
margin: 0;
padding: 0;
line-height: 1.125em;
white-space: nowrap;
}
.vector-menu-tabs .selected a, .vector-menu-tabs .selected a:visited {
color: #202122;
text-decoration: none;
}
.vector-menu-tabs li a {
background-position: right bottom;
color: var(--link-color);
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
box-sizing: border-box;
display: block;
float: left;
height: 3.07692308em;
position: relative;
padding-top: 1.25em;
padding-left: 8px;
padding-right: 8px;
/** font-size: 0.8125em; was 13px so 1em was 16px == 12 pt!
* but for now I stick to absolute font-sizes:
* 13px * 3/4 = 9.75pt
*/
--font-size: 9.75;
cursor: pointer;
}
#p-search {
float: left;
margin-right: 1em;
margin-left: 0.5em;
}
#p-search h3 {
display: block;
position: absolute !important;
clip: rect(1px,1px,1px,1px);
width: 1px;
height: 1px;
margin: -1px;
border: 0;
padding: 0;
overflow: hidden;
}
#p-search form {
margin: 0.5em 0 0;
}
/* Search box wrapper: `position: relative` anchors the absolutely
 * positioned buttons inside; width is fluid (20vw) within fixed bounds.
 * Two identical-selector rules merged into one; declaration order is
 * unchanged, so the `width` fallback cascade is preserved. */
#simpleSearch {
position: relative;
height: 100%;
min-width: 5em;
width: 13.2em; /* fallback where vw units are unsupported */
width: 20vw;
max-width: 20em;
}
#searchInput {
background-color: rgba(255,255,255,0.5);
color: #000000;
width: 100%;
height: 2.15384615em;
-webkit-box-sizing: border-box;
-moz-box-sizing: border-box;
box-sizing: border-box;
border: 1px solid #a2a9b1;
border-radius: 2px;
padding: 5px 2.15384615em 5px 0.4em;
-webkit-box-shadow: inset 0 0 0 1px transparent;
box-shadow: inset 0 0 0 1px transparent;
font-family: inherit;
/** font-size: 0.8125em; was 13px so 1em was 16px == 12 pt!
* but for now I stick to absolute font-sizes:
* 13px * 3/4 = 9.75pt
*/
--font-size: 9.75;
direction: ltr;
-webkit-transition: border-color 250ms,box-shadow 250ms;
-moz-transition: border-color 250ms,box-shadow 250ms;
transition: border-color 250ms,box-shadow 250ms;
-webkit-appearance: none;
-moz-appearance: textfield;
}
#searchButton, #mw-searchButton {
background-color: transparent;
position: absolute;
top: 1px;
bottom: 1px;
right: 1px;
min-width: 28px;
width: 2.15384615em;
margin: 0;
border: 0;
padding: 0;
cursor: pointer;
/** font-size: 0.8125em; was 13px so 1em was 16px == 12 pt!
* but for now I stick to absolute font-sizes:
* 13px * 3/4 = 9.75pt
*/
--font-size: 9.75;
direction: ltr;
text-indent: -99999px;
white-space: nowrap;
overflow: hidden;
z-index: 1;
}
.mw-indicators {
float: right;
z-index: 1;
}
.mw-body-content {
position: relative;
z-index: 0;
}
.mw-body-content {
font-size: 0.875em;
font-size: calc(1em * 0.875);
line-height: 1.6;
}
.mw-body .mw-indicators {
font-size: 0.875em;
line-height: 1.6;
position: relative;
}
.mw-body .mw-indicators {
padding-top: 0.4em;
}
.vector-menu-portal h3 {
display: block;
background-image: url(/w/skins/Vector/resources/skins.vector.styles/images/portal-separator.png?4ab04);
background-image: linear-gradient(to right,rgba(200,204,209,0) 0,#c8ccd1 33%,#c8ccd1 66%,rgba(200,204,209,0) 100%);
background-position: center bottom;
background-repeat: no-repeat;
background-size: 100% 1px;
color: #54595d;
margin: 0.5em 0 0 0.66666667em;
border: 0;
padding: 0.25em 0;
cursor: default;
}
.mw-editsection, .mw-editsection-like {
margin-left: 1em;
vertical-align: baseline;
}
/* hidden for now, complex stuff, SEO/Screenreader content */
.navbox, .printfooter, .mw-hidden-catlinks, .mw-normal-catlinks {
display: none;
}
.mw-footer {
padding: 1.25em;
}
.mw-footer ul {
list-style: none none;
margin: 0;
padding: 0;
}
#footer-places li {
float: left;
margin-right: 1em;
line-height: 2;
}
#footer-icons {
float: right;
}
#footer-icons li {
float: left;
margin-left: 0.5em;
line-height: 2;
text-align: right;
}
div.hatnote {
padding-left: 1.6em;
}
#contentSub, #contentSub2 {
margin: 0 0 1.4em 1em;
width: auto;
}
#contentSub2:empty {
display: none;
}
element {
}
.mw-jump-link:not(:focus) {
display: block;
position: absolute !important;
clip: rect(1px,1px,1px,1px);
width: 1px;
height: 1px;
margin: -1px;
border: 0;
padding: 0;
overflow: hidden;
}
.mw-body h1, .mw-body-content h1, .mw-body-content h2 {
margin-bottom: 0.25em;
padding: 0;
}
h1, h2, h3, h4, h5, h6 {
margin: 0;
padding: 0;
}
h2, h3, h4, h5, h6 {
margin-block-start: calc(1pt * var(--x-line-height) * var(--default-font-size));
}
.mw-body .firstHeading {
overflow: visible;
}
h1, h2 {
border-bottom: 1px solid #a2a9b1;
}
#p-logo + .mw-portlet h3 {
display: none;
}
/* widgets */
.container-toggle-user_settings:not(#page_portal-testbed *){
z-index: 999;
/* FIXME: on Firefox the filter property on :root makes
* "position: fixed" effectively into "position: absolute"
*/
position: sticky;
left: 0;
top: 0;
}
button.toggle-user_settings:not(#page_portal-testbed *) {
position: absolute;
}
button.toggle-user_settings {
border: 1px solid lightgray;
background: #ffffff82;
border-radius: .25em;
}
.widgets_container {
max-width: 27rem;
border: 1px solid black;
background: #fffffff4;
padding: .5em 1em;
margin: 1em;
border-radius: 0.4em;
box-shadow: -2px 2px 10px black;
}
.page_content-item .widgets_container{
z-index: 998;
position: absolute;
}
.widgets_container > * {
margin: .5em 0;
display: block;
}
.widgets_container label {
margin-bottom: .3em;
display: inline-block;
}
.widgets_container button.close{
margin-left: auto;
--font-weight: 600;
}
.widgets_container legend{
font-family: AmstelVar;
--font-weight: 600;
}
.widgets_container h3 {
font-family: AmstelVar;
margin: .5em 0;
--font-weight: 500;
}
label.user_preferences-fine_user_zoom::after {
content: attr(data-value) 'pt';
}
.widgets_container label.portal_augmentation-justification_option{
display: block;
margin-left: 1.5em;
}
label.user_preferences-user_distance::after {
content: attr(data-value) 'cm';
}
.widgets_container input[type="range"] {
vertical-align: middle;
width: 9em;
}
.page_content-item {
}
#page_portal-testbed,
:fullscreen::backdrop {
background: darkgrey;
--background: darkgrey;
}
.page_portal-testbed body{
background: none;
}
.testbed-subject_container {
}
.page_portal-testbed .widgets_container{
margin: 1em;
}
#testbed-subject {
width: 1024px;
height: 768px;
display: block;
margin: auto;
}
.portal_properties-set_orientation{
visibility: hidden;
position: absolute;
cursor: pointer;
}
.portal_properties-set_orientation ~ span:after {
content: " Landscape";
}
.portal_properties-set_orientation ~ span:before {
display: inline-block;
content: " ";
background: lightgrey;
border: 1px solid black;
border-radius: .1em;
vertical-align:middle;
width: 1em;
height: .5em;
}
.portal_properties-set_orientation:checked ~ span:after {
content: " Portrait";
}
.portal_properties-set_orientation:checked ~ span:before {
width: .5em;
height: 1em;
}
.portal_augmentation-run_justification_checkbox > input,
.portal_augmentation-switch_grade_checkbox > input{
visibility: hidden;
position: absolute;
cursor: pointer;
}
.portal_augmentation-run_justification_checkbox > input ~ span:after,
.portal_augmentation-switch_grade_checkbox > input ~ span:after {
display: inline-block;
background: #f5f4f3;
border: 0.0625em solid #edebe9;
border: 0.0625em outset ThreeDLightShadow;
padding: 0.4em;
border-radius: .2em;
}
.portal_augmentation-run_justification_checkbox > input ~ span:hover:after,
.portal_augmentation-switch_grade_checkbox > input ~ span:hover:after {
background: #f9f8f7;
}
.portal_augmentation-switch_grade_checkbox > input ~ span:before {
content: "is off ";
}
.portal_augmentation-switch_grade_checkbox > input:checked ~ span:before {
content: "is on ";
}
.portal_augmentation-switch_grade_checkbox > input ~ span:after {
content: "turn on";
}
.portal_augmentation-switch_grade_checkbox > input:checked ~ span:after {
content: "turn off";
}
label.portal_augmentation-amplify_grade::after {
content: "×" attr(data-value);
}
.portal_augmentation-run_justification_checkbox > input ~ span:after{
content: "▶ run";
}
.portal_augmentation-run_justification_checkbox > input:checked ~ span:after{
content: "▮▮ pause";
}
/* use :root for custom properties
* "*" destroys inheritance!
*
* We should set all necessary font-styles per element that defines
* font-styles.
* */
:root {
/* beginnings of the CSS framework */
/* I think the plan is to set high level variables, and depending
* on the font **generate** css that uses them the best way.
* So, a font/font-family can have capabilities, we detect them, and
* according to that, wire up these variables.
*/
/*
* Uh, another thing is that not all of the standard properties below
* do inherit. so we need to specify them explicitly for some elements!
 * e.g. columns, column-width, column-count (but column-width is more or less
* useless when column count is 1 anyways.)
* width
*/
/* page/portal info */
/*--column-width: 32.5; /* in em */
/*--column-count: 1; /* maybe use actual container width to determine this? */
/* --font choice/variation settings*/
--font-family: AmstelVar;
--grad-animation-name: AmstelVar-grad-by-font-size;
--font-style: normal; /* oblique is basically broken */
--font-slant: 0;
--font-weight: 400; /* depending on font/font-family this can go anywhere */
--font-grade: 0;
/* used to be 520 but making it narrow to work out justification */
 * interestingly, vabro.js suggests to start at 375 and to max out at 402
* was 440 and I thought it was tight.
* */
--font-stretch: 375;/* by (xtra, wdth) */
/* so far only experimental used in OLD_justifyLine*/
--font-stretch-change: 0;
--font-width: 100; /* this goes into wdth */
/* in pt! We need JS to get the document default font-size in pt,
* because we can't yet calculate it
* https://github.com/w3c/csswg-drafts/issues/6149
*/
/* Must be invalid initially or it will influence the javascript-determined value.*/
--default-font-size: "invalid";
--font-size: var(--default-font-size); /* --opsz */
--font-size-change-by-distance: calc(var(--user-distance-cm, 0) / 25);
/* FIXME: depending on line-length these should change
* with runion-01 we have always shorter lines, hence these are not of use
* anymore:
* --line-height: 1.5;
* --letter-space: 0.1;
* --word-space: 0.1;/*in fractions of em
* HOWEVER, in a long-line situation, the tool should do something like
* the above, and also the below is likely too static.
*/
--line-height: 1.3;
--letter-space: 0;
--word-space: 0;/*in fractions of em*/
}
/**
columns /* caution: e.g. firefox has bad support for break-after * /
orphans
widows
**/
*{
/*column-width: calc(1pt * var(--x-font-size) * var(--column-width));
/*column-count: var(--column-count);*/
/*width: calc(1em * var(--column-width));*/
font-family: var(--font-family);
font-style: var(--font-style);
font-size: calc(1pt * var(--x-font-size));
letter-spacing: calc(1pt * var(--letter-space));
line-height: var(--x-line-height);
word-spacing: calc(1em * var(--word-space));
/* This case is better set on each element, seems to be calculated in the
* wrong place otherwise. I.e. not using the --font-size from sub{--font-size: 8;}
* but from :root{--font-size: 12;}
*/
--x-font-size: calc(var(--font-size) + var(--fine-user-zoom, 0) + var(--font-size-change-by-distance, 0));
/* FIXME: not sure if the distance change should be implemented here!
* actually, the runion takes care of --line-height, so it should do this
* as well.
* I wonder if the following meant that for some distances we want to
* have line-space bigger than 1.2 but never smaller than, or if it
* was a mix up between min and max, and min was meant.
* --x-line-height: max(1.2, calc(var(--line-height) + (-1 * var(--font-size-change-by-distance, 0) * 0.05)));
*/
--x-line-height: max(1, min(1.2, calc(var(--line-height) + (-1 * var(--font-size-change-by-distance, 0) * 0.05))));
--x-font-grade: calc(var(--font-grade, 0) * var(--amplify-grade, 1) * var(--toggle-grade, 1));
--x-font-stretch: calc(var(--font-stretch) + var(--font-stretch-change, 0));
--x-font-weight: calc(var(--font-weight) + var(--font-weight-change, 0));
font-weight: var(--x-font-weight);
font-variation-settings:
"slnt" var(--font-slant),
"wght" var(--x-font-weight),
"GRAD" var(--x-font-grade),
"XTRA" var(--x-font-stretch),
"opsz" var(--x-font-size),
"wdth" var(--font-width)
;
/* This is not required for the variable fonts, as font-variation-settings
* will win, however, it helps with the type-ramp bookmarklet to
* eliminate false doubles and it does not do any harm.
*/
font-weight: var(--x-font-weight);
}
/* All about line raster/grid consistency across columns. */
/* no longer for p, ul, as we use text-indent and ul has it's own indent style.*/
/* BUG FIX: the original list contained `ol.not(:first-child)` — a class
 * selector named "not" followed by `(`, which is a parse error. One
 * invalid selector invalidates the ENTIRE selector list, so this whole
 * rule was dead in conforming browsers. The pseudo-class `:not()`
 * restores the intended meaning. */
table,
.thumb,
ul, ol:not(:first-child),
.templatequote{
/* likely not what we're going to use, but it aligns with the line height. */
margin-block-start: calc(1pt * var(--x-font-size) * var(--x-line-height));
}
.runion-01 .hatnote,
table,
.thumb,
ul, ol,
.templatequote {
margin-block-end: calc(1pt * var(--x-font-size) * var(--x-line-height));
}
p, ul ul{
margin-block: 0;
}
h2 + p,
h3 + p {
margin-block-start: 0;
}
table{
border-spacing: 0;
}
b, strong {
--font-weight: 700;
}
/* LIGHT/DARK MODE CHOLOR-SCHEME */
:root {
/* Only in dark mode, but it depends on font-size and weight
* where font-size selects 2 optically chosen grade adjustments
* for weights between 400 and 700 and those must be interpolated
* by the actual weight value.
*
* The values by font size could also be interpolated, but since
* they are 1pt apart, they could also be chosen by to integer
* rounded font-size, between 10 and 18.
*
* Also, doing a grep on this document, we only use --font-weight 400, 600, 700
* so, in our case interpolating is not necessary and just looking up
* values by font-size would suffice. Note though, that looking up values
* is the hard part.
*
* Maybe a lookup table could be created using @keyframes>???
* one dimension is chosen by the animation name
* one dimension is chosen by the animation position
* since the font-size is has optically/arbitrary selected values,
* it should probably be the animation name, BUT, for it's the less
* controlled value, it may better be the position! ....
*
* maybe the custom properties set by the keyframes do not interpolate
* but with enough keyframes, it could be good enough!
*/
--light-background: #fff;
--light-color: #000;
--light-mode-invert: 0;
--dark-mode-invert: 1;
--dark-link-color: #6f5608;
--dark-layout-border-color: #a06b27;
/* default */
--background: var(--light-background);
--color: var(--light-color);
--mode-invert: var(--light-mode-invert);
}
/* At least for debugging, these help immensely as the properties show
* up calc()-ulated! in the developer tools of Chromium under computed
* values! But foremost, @properties make the animations interpolate.
*/
@property --grad-400 {
syntax: '<number>';
inherits: true;
initial-value: 0;
}
@property --grad-700 {
syntax: '<number>';
inherits: true;
initial-value: 0;
}
/* For testing in the JavaScript fallback. */
@property --grad-supported {
syntax: '<number>';
inherits: true;
initial-value: 0;
}
@property --grad-animation-progression {
syntax: '<number>';
inherits: true;
initial-value: 0;
}
@property --font-grade {
syntax: '<number>';
inherits: true;
initial-value: 0;
}
/* Works together with the @properties nicely in Chromium, but Firefox
* support is not soon to expect. A solution seems to be to set
* --grad-400 and --grad-700 by hand/directly in CSS, whenever --font-size,
* --font-family, or --font-weight is set, but it wouldn't take into
* account the final absolute --x-font-size, which makes it rather unusable.
* So probably JavaScript has to walk through all elements and set those
* --grad-X00 variables where one of the three input variables changes
* compared to the parent.
* I keep this though, because it's impressive what can be done with the
* new CSS stuff and it's a good actual working example. Javascript will
* be a fallback, need to feature detect.
*/
@keyframes RobotoFlex-grad-by-font-size {
/* From 10 to 18 and a step size of 1 we have 8 steps: 12.5 % each.*/
0% { /* 10 pt*/
--grad-supported: 1;
--grad-400: 0;
--grad-700: 0;
}
12.5% { /* 11 pt*/
--grad-400: -6;
--grad-700: -6;
}
25% { /* 12 pt*/
--grad-400: -9;
--grad-700: -10;
}
37.5% { /* 13 pt*/
--grad-400: -12;
--grad-700: -15;
}
50% { /* 14 pt*/
--grad-400: -14;
--grad-700: -20;
}
62.5% { /* 15 pt*/
--grad-400: -16;
--grad-700: -30;
}
75% { /* 16 pt*/
--grad-400: -18;
--grad-700: -38;
}
87.5% { /* 17 pt*/
--grad-400: -22;
--grad-700: -43;
}
100% { /* 18 pt */
--grad-supported: 1;
--grad-400: -24;
--grad-700: -54;
}
}
@keyframes AmstelVar-grad-by-font-size {
/* From 10 to 18 and a step size of 1 we have 8 steps: 12.5 % each.*/
0% { /* 10 pt*/
--grad-supported: 1;
--grad-400: -3;
--grad-700: -80;
}
12.5% { /* 11 pt*/
--grad-400: -6;
--grad-700: -90;
}
25% { /* 12 pt*/
--grad-400: -10;
--grad-700: -95;
}
37.5% { /* 13 pt*/
--grad-400: -12;
--grad-700: -100;
}
50% { /* 14 pt*/
--grad-400: -14;
--grad-700: -105;
}
62.5% { /* 15 pt*/
--grad-400: -16;
--grad-700: -110;
}
75% { /* 16 pt*/
--grad-400: -18;
--grad-700: -115;
}
87.5% { /* 17 pt*/
--grad-400: -22;
--grad-700: -120;
}
100% { /* 18 pt */
--grad-supported: 1;
--grad-400: -24;
--grad-700: -125;
}
}
/* This is a placeholder to switch on/off existing animation
* using custom properties in animation-name.
*/
@keyframes no-animation {}
/* Font-Grade/GRAD setting
*
* Use this to turn all of this on, see dark-mode settings:
* --grad-active-animation: var(--grad-animation-name);
*
* It's a pity we can't synthesize the animation name, as in `content` e.g.:
* animation-name: var(--font-family) "-grad-by-font-size";
* OR
* --grad-animation-name: var(--font-family) "-grad-by-font-size";
* Which is a bummer, because it basically means we must set
* animation-name next to --font-family everywhere.
*/
* {
/* (undefined) will tell us in the fallback javascript that
* the @keyframes are not supported.
* --grad-400: (undefined);
* --grad-700: (undefined);
* NOTE: can't use this as in Firefox, without @property, it breaks
* all of the font-variation-settings. Instead we'll just check:
* if(getComputedStyle(elem).getPropertyValue('--grad-400') === '') ...;
*/
animation-play-state: paused;
animation-fill-mode: both;
animation-timing-function: linear;
/* depends on font-size:
* 10 and lower than = 0
* 18 and greater than = 1
*/
--grad-min-font-size: 10;
--grad-max-font-size: 18;
--grad-actual-font-size: clamp(
var(--grad-min-font-size),
var(--x-font-size),
var(--grad-max-font-size)
);
--grad-animation-progression: calc(
(var(--grad-actual-font-size) - var(--grad-min-font-size))
/ (var(--grad-max-font-size) - var(--grad-min-font-size))
);
animation-duration: 1s;
animation-delay: calc(-1s * var(--grad-animation-progression));
--grad-active-animation: no-animation;
animation-name: var(--grad-active-animation);
/* --font-grade:
* Must be only applied in dark mode!
* There's no other grade changing rule, however, this property
* is already set so that it only switches on in dark mode
*/
--font-grade: calc(
(var(--grad-700, 0) - var(--grad-400, 0))
* ( (clamp(400, var(--x-font-weight), 700) - 400) / (700 - 400))
+ var(--grad-400, 0)
);
}
@media (prefers-color-scheme: dark) {
:root{
/* This is actually the inverted color of what we really want:
* a nice light sky blue (according to the color-picker: #7b91d4)
* */
--link-color: var(--dark-link-color);
--layout-border-color: var(--dark-layout-border-color);
--mode-invert: var(--dark-mode-invert);
}
* {
--grad-active-animation: var(--grad-animation-name);
}
.user_preferences-color_scheme-default:after{
content: " (dark)";
}
}
:root.explicit-dark-mode{
--link-color: var(--dark-link-color);
--layout-border-color: var(--dark-layout-border-color);
--mode-invert: var(--dark-mode-invert);
}
.explicit-dark-mode * {
--grad-active-animation: var(--grad-animation-name);
}
@media (prefers-color-scheme: light){
:root{
--background: var(--light-background);
--color: var(--light-color);
--mode-invert: var(--light-mode-invert);
}
.user_preferences-color_scheme-default:after{
content: " (light)";
}
}
:root.explicit-light-mode{
--background: var(--light-background);
--color: var(--light-color);
--mode-invert: var(--light-mode-invert);
}
:root{
background: var(--background);
color: var(--color);
filter: invert(var(--mode-invert, 0))
}
img {
/* Invert back; this is quite elegant! */
filter: invert(var(--mode-invert, 0))
}
/* Attempts to use the framework to reproduce the original font-settings. */
#mw-head,
#mw-panel {
--font-family: RobotoFlex;
--grad-animation-name: RobotoFlex-grad-by-font-size;
--font-stretch: 400;
}
.vector-menu-portal .body li, .vector-menu-portal .vector-menu-content li {
--font-size: calc(3/4 * var(--default-font-size));
--line-height: 1.125;
word-wrap: break-word;
}
h1, h2 {
--font-family: RobotoFlex;
--grad-animation-name: RobotoFlex-grad-by-font-size;
--font-stretch: 400;
}
h1 {
--font-size: calc(9/5 * var(--default-font-size));
--font-weight: 600;
}
h2 {
--font-size: calc(3/2 * var(--default-font-size));
--font-weight: 400;
}
.mw-body-content h3, .mw-body-content h4 {
--font-family: AmstelVar;
--grad-animation-name: AmstelVar-grad-by-font-size;
--font-stretch: 400;
--font-size: calc(6/5 * var(--default-font-size));
--font-weight: 600;
}
.vector-menu-portal h3 {
--font-family: RobotoFlex;
--grad-animation-name: RobotoFlex-grad-by-font-size;
--font-size: calc(3/4 * var(--default-font-size));
--font-weight: 400;
}
.mw-editsection, .mw-editsection-like {
--font-size: calc(13/16 * var(--default-font-size));
--font-weight: 400;
--line-height: 1;
}
@property --sup-scale {
syntax: '<number>';
inherits: true;
initial-value: 0;
}
/* get font size and
*
* * .57 scale size for 14 pt
* for 8 pt = .65,
* for 144 pt. .25
*/
@keyframes synth-sub-and-super-script {
0% { /* 8pt */
--sup-scale: .65
}
4.41% { /* 14pt (14-8)/(144-8) * 100 */
--sup-scale: .57;
}
100% { /* 144pt */
--sup-scale: .25;
}
}
sup, sub{
/*This is a copy of the original --x-font-size, will always have
* to follow the original. */
--xo-font-size: calc(var(--font-size) + var(--fine-user-zoom, 0) + var(--font-size-change-by-distance, 0));
/* the redefines --x-font-size, with our scaling factor that way,
* it goes directly into OPSZ etc., without us changing the definitions
*/
--no-animation: no-animation;
animation-name: synth-sub-and-super-script, var(--grad-active-animation);
animation-play-state: paused, paused;
animation-fill-mode: both, both;
animation-timing-function: linear, linear;
/* depends on font-size:
* 10 and lower than = 0
* 18 and greater than = 1
*/
--sup-min-base-font-size: 8;
--sup-max-base-font-size: 144;
--sup-actual-base-font-size: clamp(
var(--sup-min-base-font-size),
var(--xo-font-size),
var(--sup-max-base-font-size)
);
--sup-animation-progression: calc(
(var(--sup-actual-base-font-size) - var(--sup-min-base-font-size))
/ (var(--sup-max-base-font-size) - var(--sup-min-base-font-size))
);
animation-duration: 1s, 1s;
animation-delay: calc(-1s * var(--sup-animation-progression)),
calc(-1s * var(--grad-animation-progression));
;
--x-font-size: calc(var(--xo-font-size) * var(--sup-scale));
/* If there’s a width axis, increase it to 110%,
* or * 1.1 if wdth is a non-standard value.
* FIXME: In which case would it be a non-standard value?
* FIXME: In here, we use rather XTRA for width, is it that?
*/
--font-width: 110; /* The registered axis default is 100 */
/* If there’s a wght axis, increase it by 50,
* or * 1.125 if wght is a non-standard value
* FIXME: In which case would it be a non-standard value?
*/
--font-weight-change: calc(var(--font-weight) * 0.125);
}
sup *, sub *{
font-size: inherit;
--x-font-size: inherit;
}
sup, sup * {
/*baseline shift = cap ht * .57 - cap ht
* FIXME: since I can't access cap-height directly I go with
* standard vertical-align: top, which doesn't interfere
* with the line-height as well, e.g. the value "super"
* does!
*/
vertical-align: top;
}
sub, sub * {
/* Else footnote inferiors
* baseline shift = cap ht / 6
* FIXME: same considerations as above with <sup> + baseline shift
*/
vertical-align: bottom;
}
#p-personal li {
--font-size: calc(3/4 * var(--default-font-size));
}
b, strong {
--font-weight: 600;
}
i, cite, em, var, dfn {
--font-style: italic;
}
.hatnote {
--font-style: italic;
}
.mw-body h1, .mw-body-content h1, .mw-body-content h2 {
/* Positioning the headline underline (border-bottom) closely to the box.
 * Took this from the original markup; I'm not sure this is the best way.*/
--line-height:1.3;
}
.runion-01 {
/*
--column-count: columns;
--column-width-en: lineLengthEn;
--column-gap-en: columnGapEn;
--padding-left-en: paddingLeftEn;
--padding-right-en: paddingRightEn;
*/
columns: calc(.5em * var(--column-width-en, 0)) var(--column-count, 1);
padding-left: calc(.5em * var(--padding-left-en, 0));
padding-right: calc(.5em * var(--padding-right-en, 0));
column-gap: calc(.5em * var(--column-gap-en, 0));
/* FIXME/TODO: what about H&J, especially in Chrome and Safari?
* ALSO: how to detect and change the setup when hyphens are not available?
*
* As of now confirmed working with the current testing page as it is,
* in English:
* Firefox for openSuse - Tumbleweed Version 86.0.1 (64-bit)
* Chromium Version 89.0.4389.90 (openSUSE Build) (64-bit)
*
* https://css-tricks.com/almanac/properties/h/hyphenate/
*
* There are it seem related properties to configure hyphenation
* hyphenate-limit-last, hyphenate-limit-zone etc.
* Safari 5+ requires -webkit-, Firefox 6+ requires -moz-, IE 10+ requires -ms-, iOS 4.2+ requires -webkit-.
*/
word-wrap: break-word;
overflow-wrap: break-word;
-webkit-hyphens: auto;
-moz-hyphens: auto;
-ms-hyphens: auto;
hyphens: auto;
text-align: left;
/* FIXME: should vary with line-length etc. */
text-indent: 2em;
/* text-align: justify;
* We're doing the justification ourselves, initially, left-aligned
* is required, because we can use it to measure the empty space that
* is left on the line.
*/
/*text-justify: inter-word; auto is OK so far as well*/
/* Firefox does not support these! */
orphans: 3;
widows: 3;
}
/* no text-indent*/
.runion-01 .hatnote,
.runion-01 ul,
.runion-01 ol,
.runion-01 h1,
.runion-01 h2,
.runion-01 h3{
text-indent: 0;
}
/* FIXME .hatnote may have margin-block, but should
* not indent, it's already italic anyways.
* */
.runion-01 .hatnote{
padding-inline-start: 0;
/* because it's usually really short */
hyphens: none;
}
.runion-01.runion-activated {
white-space: nowrap;
}
/* FIXME: Apply the class .fix-short-section automatically, it's
* currently set manually on the <p> of the sub-section "Etymology".
*/
.runion-01.runion-activated .fix-short-section {
column-span: all;
width: calc(.5em * var(--column-width-en, 0));
}
/* these are sectioning elements */
.runion-01 h1,
.runion-01 h2,
.runion-01 h3 {
column-span: all;
}
.runion-01 div.tright,
.runion-01 div.tleft {
float:none;
margin-left:0;
margin-right: 0;
/* FIXME: this stuff distracts from the column layout, needs resolution.
display: none;*/
}
/* Control line-wrapping.
*
* Chrome cannot do it's hyphenation between two spans, so after
* wrapping the text-nodes into the .runion-line spans, line breaking
* changes.
* Firefox keeps the same layout without any trouble, but, since we plan
* to change the font-spec for each line to fit better, this must be a
* bomb-proof way to ensure each line stays where it is.
*/
.runion-line {
/* This is forcing the browser.*/
white-space: nowrap;
/* This is the cool when we toggle the color-coded-lines class*/
transition: background 3s;
}
.color-coded-lines .runion-line{
/* with small line-heights and color coded line backgrounds, we
* tend to clip descenders.
* FXME: must be removed when no line color coding is desired,
* especially Chromium has little rendering errors in column layout
* here.
*/
mix-blend-mode: darken;
background: var(--line-color-code, bisque);
transition: background .5s;
}
.skip-justify {
white-space: normal;
}
/**
* Introduce explicit wrapping before each line
* This normalizes the behavior of Chrome and Firefox, both loose
* their hyphenation with the first line ::before display: block rule
* now, we need to mark elements in JS explicitly that require hyphens!.
*/
.runion-line.r00-l-first::before{
content: '';
display: block;
}
.runion-line.r00-first-line.r00-l-first::before
{
content: unset;
display: unset;
}
/*
* This fixes line wrapping, not changing in Chrome (nor Firefox). This
* has the advantage, that the last-line in a paragraph is aligned left
* and the other lines are justified by word-space, exactly what we are
* looking for.
* The heuristic that the adds the hyphen class may still have flaws!
*/
.runion-line.r00-l-hyphen::after {
content: '-'; /* I tried using soft hyphen ­ '\00AD' but it
doesn't do anything*/
}
| 24.959538
| 119
| 0.629535
|
af635e5b2fd5eafb419ce0ba08dedd670adcf8b7
| 24,061
|
py
|
Python
|
openfda/drugsfda/pipeline.py
|
FDA/openfda
|
93c3abed4042a4a2729975468c4e377a67e8a5ca
|
[
"CC0-1.0"
] | 388
|
2015-01-09T18:50:35.000Z
|
2022-03-24T10:15:23.000Z
|
openfda/drugsfda/pipeline.py
|
FDA/openfda
|
93c3abed4042a4a2729975468c4e377a67e8a5ca
|
[
"CC0-1.0"
] | 150
|
2015-01-21T20:30:54.000Z
|
2022-03-28T20:46:29.000Z
|
openfda/drugsfda/pipeline.py
|
FDA/openfda
|
93c3abed4042a4a2729975468c4e377a67e8a5ca
|
[
"CC0-1.0"
] | 113
|
2015-01-31T21:24:16.000Z
|
2022-01-30T15:17:28.000Z
|
#!/usr/local/bin/python
'''
Pipeline for converting Drugs@FDA files to JSON and importing into Elasticsearch.
'''
import glob
import logging
import os
import re
import shlex
from os.path import join

import arrow
import luigi

from openfda import common, config, parallel, index_util
from openfda.annotation_table.pipeline import CombineHarmonization
from openfda.common import first_file_timestamp
from openfda.drugsfda.annotate import AnnotateMapper
# Single zip archive containing all Drugs@FDA data files, published by FDA.
DOWNLOAD_FILE = 'https://www.fda.gov/media/89850/download'
# All raw, extracted, and intermediate artifacts live under the shared
# data directory configured for the project.
BASE_DIR = join(config.data_dir(), 'drugsfda')
EXTRACTED_DIR = join(BASE_DIR, 'extracted')
RAW_DATA_FILE = join(BASE_DIR, 'raw/drugsfda.zip')
# Intermediate database paths — one per Drugs@FDA source table, plus the
# annotated and merged outputs produced by later pipeline stages.
PRODUCTS_DB = join(BASE_DIR, 'json/products.db')
APPLICATIONS_DB = join(BASE_DIR, 'json/applications.db')
APPLICATIONS_DOCS_DB = join(BASE_DIR, 'json/applicationsdocs.db')
SUBMISSIONS_DB = join(BASE_DIR, 'json/submissions.db')
SUBMISSION_PROPERTY_TYPE_DB = join(BASE_DIR, 'json/submissionpropertytype.db')
MARKETING_STATUS_DB = join(BASE_DIR, 'json/marketingstatus.db')
ANNOTATED_DB = join(BASE_DIR, 'json/annotated.db')
TE_DB = join(BASE_DIR, 'json/te.db')
MERGED_DB = join(BASE_DIR, 'json/merged.db')
class DownloadDrugsFDAFiles(luigi.Task):
  '''Downloads the Drugs@FDA zip archive to RAW_DATA_FILE.'''

  def requires(self):
    # Root task of the pipeline: nothing upstream.
    return []

  def output(self):
    return luigi.LocalTarget(RAW_DATA_FILE)

  def run(self):
    common.download(DOWNLOAD_FILE, RAW_DATA_FILE)
class ExtractDrugsFDAFiles(luigi.Task):
  '''Unzips the downloaded Drugs@FDA archive into EXTRACTED_DIR.'''

  def requires(self):
    return DownloadDrugsFDAFiles()

  def output(self):
    return luigi.LocalTarget(EXTRACTED_DIR)

  def run(self):
    # Quote both paths so the shell command stays correct even if the
    # configured data directory contains spaces or shell metacharacters.
    # (The original interpolated unquoted paths via `% locals()`.)
    os.system('unzip -o %s -d %s' % (
        shlex.quote(RAW_DATA_FILE), shlex.quote(self.output().path)))
class CleanDrugsFDAFiles(luigi.Task):
def requires(self):
return ExtractDrugsFDAFiles()
def output(self):
return luigi.LocalTarget(EXTRACTED_DIR)
def run(self):
for filename in glob.glob(self.input().path + '/ApplicationDocs.txt'):
logging.info('Pre-processing %s', filename)
filtered = filename + '.filtered'
out = open(filtered, 'w')
line_num = 0
bad_lines = 0
with open(filename, 'rU', errors='ignore') as fp:
for line in fp:
line = line.strip()
if line_num < 1:
# First line is usually the header
out.write(line)
else:
if len(line.strip()) > 0:
if re.search(r'^\d{1,}', line):
# Properly formatted line. Append it and move on.
out.write('\n' + line)
else:
# Bad line, most likely due to an unescaped carriage return. Tuck it onto the previous line
out.write(' ' + line)
bad_lines += 1
line_num += 1
logging.info('Issues found & fixed: %s', bad_lines)
out.close()
os.remove(filename)
os.rename(filtered, filename)
class Applications2JSONMapper(parallel.Mapper):
rename_map = {
'ApplNo': 'application_no',
'ApplType': 'application_type',
'ApplPublicNotes': 'application_public_notes',
'SponsorName': 'sponsor_name'
}
def map(self, key, value, output):
def _cleaner(k, v):
''' Helper function to rename keys and purge any keys that are not in
the map.
'''
v = v.strip() if isinstance(v, str) else v
if k in self.rename_map and v is not None and v != '':
return (self.rename_map[k], v)
json = common.transform_dict(value, _cleaner)
if json.get('application_public_notes') != None:
del json['application_public_notes']
if json.get('application_no') and json.get('application_type'):
json['application_number'] = json.get('application_type') + json.get('application_no')
del json['application_type']
del json['application_no']
output.add(key, json)
class Product2JSONMapper(parallel.Mapper):
rename_map = {
'ApplNo': 'application_number',
'ProductNo': 'product_number',
'Form': 'df_and_route',
'Strength': 'strength',
'ReferenceDrug': 'reference_drug',
'DrugName': 'brand_name',
'ActiveIngredient': 'active_ingredients',
'ReferenceStandard': 'reference_standard'
}
VALUE_MAPPINGS = {
"reference_drug": {
"0": "No",
"1": "Yes",
"2": "TBD"
},
"reference_standard": {
"0": "No",
"1": "Yes"
}
}
def map(self, key, value, output):
def _cleaner(k, v):
''' Helper function to rename keys and purge any keys that are not in
the map.
'''
v = v.strip() if isinstance(v, str) else v
if k in self.rename_map and v is not None and v != '':
new_key = self.rename_map[k]
if new_key in self.VALUE_MAPPINGS and v in self.VALUE_MAPPINGS[new_key]:
v = self.VALUE_MAPPINGS[new_key][v]
return (new_key, v)
json = common.transform_dict(value, _cleaner)
# Turn active ingredients into an array of objects as per the mapping.
if json.get('active_ingredients'):
ingredientList = re.sub(';\s+', ';', json['active_ingredients']).split(';')
json['active_ingredients'] = []
strengthList = re.sub(';\s+', ';', json['strength']).split(';') if json.get('strength') else []
for idx, name in enumerate(ingredientList):
ingredient = {'name': name}
if len(strengthList) > idx:
ingredient['strength'] = strengthList[idx]
json['active_ingredients'].append(ingredient)
else:
# Delete to avoid complaints from Elasticsearch.
if json.get('active_ingredients') is not None:
del json['active_ingredients']
if json.get('strength') is not None:
del json['strength']
# Split dosage and form into two distinct fields.
if json.get('df_and_route') and len(json['df_and_route'].split(';')) == 2:
json['dosage_form'] = json['df_and_route'].split(';')[0].strip()
json['route'] = json['df_and_route'].split(';')[1].strip()
# Sometimes the entire entry is Unknown. Indicate this for both df & route.
elif json.get('df_and_route') and "UNKNOWN" in json['df_and_route']:
json['dosage_form'] = json['df_and_route']
json['route'] = json['df_and_route']
# Sometimes the entire only contains dosage form.
else:
json['dosage_form'] = json['df_and_route']
json['route'] = None
# Delete the field either way
del json['df_and_route']
# Assign application number as the key, since all three drugs@FDA files can be joined by this key.
key = build_products_key(json['application_number'], json)
del json['application_number']
output.add(key, json)
def build_products_key(app_number, json):
return ('%s-%s' % (app_number, json['product_number']))
class MarketingStatus2JSONMapper(parallel.Mapper):
def __init__(self, doc_lookup):
parallel.Mapper.__init__(self)
self.doc_lookup = doc_lookup
rename_map = {
'MarketingStatusID': 'marketing_status_id',
'ApplNo': 'application_number',
'ProductNo': 'product_number'
}
def map(self, key, value, output):
def _cleaner(k, v):
''' Helper function to rename keys and purge any keys that are not in
the map.
'''
v = v.strip() if isinstance(v, str) else v
if k in self.rename_map and v is not None and v != '':
return (self.rename_map[k], v)
json = common.transform_dict(value, _cleaner)
if json.get('marketing_status_id'):
json['marketing_status'] = self.doc_lookup[json['marketing_status_id']]
del json['marketing_status_id']
# Assign application number as the key, since all three drugs@FDA files can be joined by this key.
key = build_products_key(json['application_number'], json)
del json['application_number'], json['product_number']
output.add(key, json)
class TE2JSONMapper(parallel.Mapper):
def __init__(self, doc_lookup):
parallel.Mapper.__init__(self)
self.doc_lookup = doc_lookup
rename_map = {
'ApplNo': 'application_number',
'ProductNo': 'product_number',
'MarketingStatusID': 'marketing_status_id',
'TECode': 'te_code'
}
def map(self, key, value, output):
def _cleaner(k, v):
''' Helper function to rename keys and purge any keys that are not in
the map.
'''
v = v.strip() if isinstance(v, str) else v
if k in self.rename_map and v is not None and v != '':
return (self.rename_map[k], v)
json = common.transform_dict(value, _cleaner)
if json.get('marketing_status_id'):
json['marketing_status'] = self.doc_lookup[json['marketing_status_id']]
del json['marketing_status_id']
# Assign application number as the key, since all three drugs@FDA files can be joined by this key.
key = build_products_key(json['application_number'], json)
del json['application_number'], json['product_number']
output.add(key, json)
class Submissions2JSONMapper(parallel.Mapper):
def __init__(self, doc_lookup):
parallel.Mapper.__init__(self)
self.doc_lookup = doc_lookup
rename_map = {
'ApplNo': 'application_number',
'SubmissionClassCodeID': 'submission_class_code_id',
'SubmissionType': 'submission_type',
'SubmissionNo': 'submission_number',
'SubmissionStatus': 'submission_status',
'SubmissionStatusDate': 'submission_status_date',
'SubmissionsPublicNotes': 'submission_public_notes',
'ReviewPriority': 'review_priority'
}
def map(self, key, value, output):
def _cleaner(k, v):
''' Helper function to rename keys and purge any keys that are not in
the map.
'''
v = common.convert_unicode(v.strip()) if isinstance(v, str) else v
if k in self.rename_map and v is not None and v != '':
return (self.rename_map[k], v)
json = common.transform_dict(value, _cleaner)
if json.get('submission_class_code_id') and json.get('submission_class_code_id') is not None:
json['submission_class_code'] = self.doc_lookup[json['submission_class_code_id']][0]
descr = self.doc_lookup[json['submission_class_code_id']][1].rstrip()
if descr:
json['submission_class_code_description'] = descr
del json['submission_class_code_id']
# Convert date to format used throughout openFDA (yyyymmdd)
if json.get('submission_status_date'):
json['submission_status_date'] = arrow.get(json['submission_status_date']).strftime("%Y%m%d")
# Assign application number as the key, since all three drugs@FDA files can be joined by this key.
key = build_submissions_key(json['application_number'], json)
del json['application_number']
output.add(key, json)
def build_submissions_key(app_number, json):
return ('%s-%s-%s' % (app_number, json['submission_type'], json['submission_number']))
class SubmissionPropertyType2JSONMapper(parallel.Mapper):
rename_map = {
'ApplNo': 'application_number',
'SubmissionType': 'submission_type',
'SubmissionNo': 'submission_number',
'SubmissionPropertyTypeCode': 'code'
}
def map(self, key, value, output):
def _cleaner(k, v):
''' Helper function to rename keys and purge any keys that are not in
the map.
'''
v = v.strip() if isinstance(v, str) else v
if k in self.rename_map and v is not None and v != '' and v != 'Null':
return (self.rename_map[k], v)
json = common.transform_dict(value, _cleaner)
# Assign application number as the key, since all three drugs@FDA files can be joined by this key.
key = build_submissions_key(json['application_number'], json)
del json['application_number'], json['submission_number'], json['submission_type']
if json != {}:
output.add(key, json)
class ApplicationsDocs2JSONMapper(parallel.Mapper):
def __init__(self, doc_lookup):
parallel.Mapper.__init__(self)
self.doc_lookup = doc_lookup
rename_map = {
'ApplicationDocsID': 'id',
'ApplicationDocsTypeID': 'type_id',
'ApplNo': 'application_number',
'SubmissionType': 'submission_type',
'SubmissionNo': 'submission_number',
'ApplicationDocsTitle': 'title',
'ApplicationDocsURL': 'url',
'ApplicationDocsDate': 'date'
}
def map(self, key, value, output):
def _cleaner(k, v):
''' Helper function to rename keys and purge any keys that are not in
the map.
'''
v = v.strip() if isinstance(v, str) else v
if k in self.rename_map and v is not None and v != '':
new_key = self.rename_map[k]
if not (new_key == 'title' and v == '0'):
return (new_key, v)
json = common.transform_dict(value, _cleaner)
json['type'] = self.doc_lookup[json['type_id']]
del json['type_id']
# Convert date to format used throughout openFDA (yyyymmdd)
json['date'] = arrow.get(json['date']).strftime("%Y%m%d") if json.get('date') is not None else ""
json['url'] = common.convert_unicode(json['url']) if json.get('url') is not None else ""
# Assign application number as the key, since all three drugs@FDA files can be joined by this key.
key = build_submissions_key(json['application_number'], json)
del json['application_number'], json['submission_number'], json['submission_type']
output.add(key, json)
class Applications2JSON(luigi.Task):
def requires(self):
return CleanDrugsFDAFiles()
def output(self):
return luigi.LocalTarget(APPLICATIONS_DB)
def run(self):
parallel.mapreduce(
parallel.Collection.from_glob(
join(self.input().path, 'Applications.txt'), parallel.CSVDictLineInput(delimiter='\t')),
mapper=Applications2JSONMapper(),
reducer=parallel.IdentityReducer(),
output_prefix=self.output().path)
class Products2JSON(luigi.Task):
def requires(self):
return CleanDrugsFDAFiles()
def output(self):
return luigi.LocalTarget(PRODUCTS_DB)
def run(self):
parallel.mapreduce(
parallel.Collection.from_glob(
join(self.input().path, 'Products.txt'), parallel.CSVDictLineInput(delimiter='\t')),
mapper=Product2JSONMapper(),
reducer=parallel.IdentityReducer(),
output_prefix=self.output().path)
class MarketingStatus2JSON(luigi.Task):
def requires(self):
return CleanDrugsFDAFiles()
def output(self):
return luigi.LocalTarget(MARKETING_STATUS_DB)
def run(self):
with open(join(EXTRACTED_DIR, 'MarketingStatus_Lookup.txt')) as fin:
rows = (line.split('\t') for line in fin)
doc_lookup = {row[0]: row[1] for row in rows}
parallel.mapreduce(
parallel.Collection.from_glob(
join(self.input().path, 'MarketingStatus.txt'), parallel.CSVDictLineInput(delimiter='\t')),
mapper=MarketingStatus2JSONMapper(doc_lookup=doc_lookup),
reducer=parallel.IdentityReducer(),
output_prefix=self.output().path)
class TE2JSON(luigi.Task):
def requires(self):
return CleanDrugsFDAFiles()
def output(self):
return luigi.LocalTarget(TE_DB)
def run(self):
with open(join(EXTRACTED_DIR, 'MarketingStatus_Lookup.txt')) as fin:
rows = (line.split('\t') for line in fin)
doc_lookup = {row[0]: row[1] for row in rows}
parallel.mapreduce(
parallel.Collection.from_glob(
join(self.input().path, 'TE.txt'), parallel.CSVDictLineInput(delimiter='\t')),
mapper=TE2JSONMapper(doc_lookup=doc_lookup),
reducer=parallel.IdentityReducer(),
output_prefix=self.output().path)
class Submissions2JSON(luigi.Task):
def requires(self):
return CleanDrugsFDAFiles()
def output(self):
return luigi.LocalTarget(SUBMISSIONS_DB)
def run(self):
with open(join(EXTRACTED_DIR, 'SubmissionClass_Lookup.txt')) as fin:
rows = ( line.split('\t') for line in fin )
doc_lookup = {row[0]: [row[1], row[2]] for row in rows}
parallel.mapreduce(
parallel.Collection.from_glob(
join(self.input().path, 'Submissions.txt'), parallel.CSVDictLineInput(delimiter='\t')),
mapper=Submissions2JSONMapper(doc_lookup=doc_lookup),
reducer=parallel.IdentityReducer(),
output_prefix=self.output().path)
class SubmissionPropertyType2JSON(luigi.Task):
def requires(self):
return CleanDrugsFDAFiles()
def output(self):
return luigi.LocalTarget(SUBMISSION_PROPERTY_TYPE_DB)
def run(self):
parallel.mapreduce(
parallel.Collection.from_glob(
join(self.input().path, 'SubmissionPropertyType.txt'), parallel.CSVDictLineInput(delimiter='\t')),
mapper=SubmissionPropertyType2JSONMapper(),
reducer=parallel.ListReducer(),
output_prefix=self.output().path)
class ApplicationsDocs2JSON(luigi.Task):
def requires(self):
return CleanDrugsFDAFiles()
def output(self):
return luigi.LocalTarget(APPLICATIONS_DOCS_DB)
def run(self):
with open(join(EXTRACTED_DIR, 'ApplicationsDocsType_Lookup.txt')) as fin:
rows = (line.split('\t') for line in fin)
doc_lookup = {row[0]: row[1].rstrip() for row in rows}
parallel.mapreduce(
parallel.Collection.from_glob(
join(self.input().path, 'ApplicationDocs.txt'), parallel.CSVDictLineInput(delimiter='\t')),
mapper=ApplicationsDocs2JSONMapper(doc_lookup=doc_lookup),
reducer=parallel.ListReducer(),
output_prefix=self.output().path)
class MergeAllMapper(parallel.Mapper):
def __init__(self, applications_db_path, products_db_path, applications_docs_db_path, submissions_db_path,
submissions_property_type_db_path, marketing_status_path, te_db_path):
self.applications_db_path = applications_db_path
self.products_db_path = products_db_path
self.applications_docs_db_path = applications_docs_db_path
self.submissions_db_path = submissions_db_path
self.submissions_property_type_db_path = submissions_property_type_db_path
self.marketing_status_db_path = marketing_status_path
self.te_db_path = te_db_path
def map_shard(self, map_input, map_output):
# Transform product DB into a dictionary keyed by application number
self.products_dict = {}
for key, product in parallel.ShardedDB.open(self.products_db_path).range_iter(None, None):
split = key.split('-')
app_key = split[0]
products_arr = [] if self.products_dict.get(app_key) is None else self.products_dict.get(app_key)
products_arr.append(product)
self.products_dict[app_key] = products_arr
# Transform all sub-product DBs into a dictionary keyed by application number & product number
self.marketing_status_dict = {}
for key, value in parallel.ShardedDB.open(self.marketing_status_db_path).range_iter(None, None):
self.marketing_status_dict[key] = value
self.te_dict = {}
for key, value in parallel.ShardedDB.open(self.te_db_path).range_iter(None, None):
self.te_dict[key] = value
# Transform submissions DB into a dictionary keyed by application number
self.submissions_dict = {}
for key, submission in parallel.ShardedDB.open(self.submissions_db_path).range_iter(None, None):
split = key.split('-')
app_key = split[0]
submissions_arr = [] if self.submissions_dict.get(app_key) is None else self.submissions_dict.get(app_key)
submissions_arr.append(submission)
self.submissions_dict[app_key] = submissions_arr
# Transform all sub-submission DBs into a dictionary keyed by application number & submission number
self.submissions_property_type_dict = {}
for key, value in parallel.ShardedDB.open(self.submissions_property_type_db_path).range_iter(None, None):
self.submissions_property_type_dict[key] = value
self.applications_docs_dict = {}
for key, value in parallel.ShardedDB.open(self.applications_docs_db_path).range_iter(None, None):
self.applications_docs_dict[key] = value
parallel.Mapper.map_shard(self, map_input, map_output)
def map(self, key, application, out):
self.add_products(application)
self.add_submissions(application)
out.add(key, application)
def add_products(self, application):
key = re.sub("[^0-9]", "", application['application_number'])
products = self.products_dict.get(key)
if products:
products = self.add_marketing_status(products, key)
products = self.add_te(products, key)
application['products'] = products
def add_marketing_status(self, products, app_key):
for product in products:
key = build_products_key(app_key, product)
if key in self.marketing_status_dict:
marketing_json = self.marketing_status_dict.get(key)
product['marketing_status'] = marketing_json['marketing_status'].rstrip()
return products
def add_te(self, products, app_key):
for product in products:
key = build_products_key(app_key, product)
if key in self.te_dict:
te_json = self.te_dict.get(key)
if te_json.get('te_code'):
product['te_code'] = te_json['te_code'].rstrip()
if not 'marketing_status' in product and 'marketing_status' in te_json:
product['marketing_status'] = te_json['marketing_status'].rstrip()
return products
def add_submissions(self, application):
key = re.sub("[^0-9]", "", application['application_number'])
submissions = self.submissions_dict.get(key)
if submissions:
submissions = self.add_submissions_property_type(submissions, key)
submissions = self.add_applications_docs(submissions, key)
application['submissions'] = submissions
def add_submissions_property_type(self, submissions, app_key):
for submission in submissions:
key = build_submissions_key(app_key, submission)
if key in self.submissions_property_type_dict:
prop_type = self.submissions_property_type_dict.get(key)
submission['submission_property_type'] = prop_type
return submissions
def add_applications_docs(self, submissions, app_key):
for submission in submissions:
key = build_submissions_key(app_key, submission)
if key in self.applications_docs_dict:
submission['application_docs'] = self.applications_docs_dict.get(key)
return submissions
class MergeAll(luigi.Task):
def requires(self):
return [Applications2JSON(), Products2JSON(), ApplicationsDocs2JSON(), Submissions2JSON(),
SubmissionPropertyType2JSON(), MarketingStatus2JSON(), TE2JSON()]
def output(self):
return luigi.LocalTarget(MERGED_DB)
def run(self):
applications_db = self.input()[0].path
products_db = self.input()[1].path
applications_docs_db = self.input()[2].path
submissions_db = self.input()[3].path
submissions_property_type_db = self.input()[4].path
marketing_status = self.input()[5].path
te_db = self.input()[6].path
parallel.mapreduce(
parallel.Collection.from_sharded(applications_db),
mapper=MergeAllMapper(applications_db, products_db, applications_docs_db, submissions_db,
submissions_property_type_db, marketing_status, te_db),
reducer=parallel.IdentityReducer(),
output_prefix=self.output().path,
map_workers=1,
num_shards=1) # TODO: improve the code to avoid having to limit number of shards to one
class AnnotateDrugsFDA(luigi.Task):
def requires(self):
return [MergeAll(), CombineHarmonization()]
def output(self):
return luigi.LocalTarget(ANNOTATED_DB)
def run(self):
input_db = self.input()[0].path
harmonized_file = self.input()[1].path
parallel.mapreduce(
parallel.Collection.from_sharded(input_db),
mapper=AnnotateMapper(harmonized_file),
reducer=parallel.IdentityReducer(),
output_prefix=self.output().path,
num_shards=1) # TODO: improve the code to avoid having to limit number of shards to one
class LoadJSON(index_util.LoadJSONBase):
index_name = 'drugsfda'
type_name = 'drugsfda'
mapping_file = './schemas/drugsfda_mapping.json'
data_source = AnnotateDrugsFDA()
use_checksum = False
optimize_index = True
last_update_date = lambda _: first_file_timestamp(os.path.dirname(RAW_DATA_FILE))
if __name__ == '__main__':
luigi.run()
| 34.921626
| 112
| 0.690661
|
ae9a44490251efbde3650fbe3120b21fa1162cc0
| 2,994
|
cs
|
C#
|
dotnet/assemblies/NPanday.Artifact/src/main/csharp/ArtifactContext.cs
|
octavian-h/npanday
|
02a0fa919534fc4afee4ae407f8e597fd7f5e6a7
|
[
"Apache-2.0"
] | 6
|
2016-06-06T05:04:19.000Z
|
2021-11-10T13:36:47.000Z
|
dotnet/assemblies/NPanday.Artifact/src/main/csharp/ArtifactContext.cs
|
octavian-h/npanday
|
02a0fa919534fc4afee4ae407f8e597fd7f5e6a7
|
[
"Apache-2.0"
] | 3
|
2017-10-28T14:08:53.000Z
|
2019-02-25T13:24:27.000Z
|
dotnet/assemblies/NPanday.Artifact/src/main/csharp/ArtifactContext.cs
|
isabella232/npanday
|
8070cbb89290c3498f6227b67026df6f491933e0
|
[
"Apache-2.0"
] | 12
|
2015-01-15T14:29:50.000Z
|
2021-11-10T13:36:36.000Z
|
#region Apache License, Version 2.0
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
#endregion
using System;
using System.IO;
using System.Collections.Generic;
using System.Text;
using NPanday.Model.Settings;
using NPanday.Utils;
namespace NPanday.Artifact
{
public sealed class ArtifactContext : IArtifactContext
{
public ArtifactRepository GetArtifactRepository()
{
ArtifactRepository artifactRepository = new ArtifactRepository();
artifactRepository.Init(this, new DirectoryInfo(SettingsUtil.GetLocalRepositoryPath()));
return artifactRepository;
}
public Artifact GetArtifactFor(NPanday.Model.Pom.Model model)
{
Artifact artifact = new Artifact();
artifact.ArtifactId = model.artifactId;
artifact.GroupId = model.groupId;
artifact.Version = model.version;
artifact.Extension = GetExtensionFor(model.packaging);
return artifact;
}
public Artifact CreateArtifact(String groupId, String artifactId, String version, String packaging)
{
Artifact artifact = new Artifact();
artifact.ArtifactId = artifactId;
artifact.GroupId = groupId;
artifact.Version = version;
artifact.Extension = GetExtensionFor(packaging);
return artifact;
}
public String GetExtensionFor(String packaging)
{
if (packaging.Equals("dotnet-library") || packaging.Equals("library")
|| packaging.Equals("dotnet-maven-plugin") || packaging.Equals("netplugin")
|| packaging.Equals("visual-studio-addin") || packaging.Equals("sharp-develop-addin"))
{
return "dll";
}
else if (packaging.Equals("dotnet-executable") || packaging.Equals("dotnet-windows-executable")
|| packaging.Equals("winexe") || packaging.Equals("exe"))
{
return "exe";
}
else if (packaging.Equals("dotnet-module") || packaging.Equals("module"))
{
return "netmodule";
}
return null;
}
}
}
| 36.072289
| 107
| 0.640949
|
af6b52cdab74b5a765bdb47babc0005f64988caf
| 3,557
|
py
|
Python
|
program/ohkawabata/3_webpage.py
|
shutokawabata0723/kenkyu
|
b613b4daddca9b8b16efe0802669611948daea18
|
[
"MIT"
] | 1
|
2021-05-06T03:35:16.000Z
|
2021-05-06T03:35:16.000Z
|
program/ohkawabata/3_webpage.py
|
shutokawabata0723/kenkyu
|
b613b4daddca9b8b16efe0802669611948daea18
|
[
"MIT"
] | null | null | null |
program/ohkawabata/3_webpage.py
|
shutokawabata0723/kenkyu
|
b613b4daddca9b8b16efe0802669611948daea18
|
[
"MIT"
] | null | null | null |
#coding:utf-8
PURPLE = '\033[35m'
RED = '\033[31m'
CYAN = '\033[36m'
GREEN = '\033[92m'
BLUE = '\033[94m'
ENDC = '\033[0m'
import urllib.request, urllib.error
from urllib.request import Request, urlopen
from urllib.error import URLError
from bs4 import BeautifulSoup
from time import sleep
from extractcontent3 import ExtractContent
import requests
import chardet #Confirm the codec
import timeout_decorator
##### ExtractContent (本文のみ抽出)########
def extractor(html):
extractor = ExtractContent()
# オプション値を指定する
opt = {"threshold":50}
#extractor.set_default(opt)
#html = open("index.html").read() # 解析対象HTML
extractor.analyse(html)
text, title = extractor.as_text()
html, title = extractor.as_html()
title = extractor.extract_title(html)
#print(text)
return text
def soup(html):
soup = BeautifulSoup(html, "html.parser")
soup_title = soup.find_all("title")
soup_h1 = soup.find_all("h1")
soup_h2 = soup.find_all("h2")
soup_h3 = soup.find_all("h3")
soup_h4 = soup.find_all("h4")
soup_h5 = soup.find_all("h5")
soup_h6 = soup.find_all("h6")
soup_a = soup.find_all("a")
soup_p = soup.find_all("p")
soup=soup_title+soup_h1+soup_h2+soup_h3+soup_h4+soup_h5+soup_h6+soup_a+soup_p
maped_list = map(str, soup)
soup1 = ' '.join(maped_list)
soup2 = BeautifulSoup(soup1, "html.parser")
text = soup2.get_text()
return str(text)
def request(url,try_cnt):
try:
r = requests.get(url)
r.raise_for_status()
r.encoding = r.apparent_encoding
html = r.text
text = extractor(html)
text = text.replace(',',' ').replace('\n',' ').replace('\t','').replace('\r','')
print (text[:50]+'...')
return text
except Exception as e:
sleep(0.1)
try_cnt += 1
if try_cnt <= 1:
request(url,try_cnt)
else:
print ('server error occured')
# main function
a = open('sg_urls.csv','r')
b = open('webpage.csv','w')
b.write('page_id,url,suggest,contents\n')
# just in case
#c = open('id-content.csv','w')
#c.write('page_id,content\n')
cnt = 0
for i in a:
#print (GREEN + str(cnt) + ENDC)
if cnt > 0:
LINE = i.rstrip().split(',')
web_id = LINE[0]
url = LINE[1]
suggest= LINE[2]
try_cnt= 0
print (GREEN+str(web_id)+ENDC)
print (BLUE+url+ENDC)
print (CYAN+suggest+ENDC)
#timeout_decorator.timeout(5)
#req = urllib.request.Request(url, headers={'User-Agent': 'Mozilla/5.0'})
#html = urllib.request.urlopen(req)
#text = soup(html)
#text = text.replace(',',' ').replace('\n',' ').replace('\t','').replace('\r','')
if '.pdf' in url:
print('Because of PDF file, skipped\n')
else:
#@timeout_decorator.timeout(30)
timeout_decorator.timeout(5)
try:
text = request(url,try_cnt)
#print (text[:50]+'...')
#print(chardet.detect(text))
print ('')
b.write(str(web_id)+','+url+','+suggest+','+str(text)+'\n')
#c.write(str(web_id)+','+str(text)+'\n')
except:
print ('time error')
sleep(0.5)
cnt += 1
a.close()
b.close()
#c.close()
#url = "https://to-kei.net"
#req = urllib.request.Request(url, headers={'User-Agent': 'Mozilla/5.0'})
#html = urllib.request.urlopen(req)
#text = soup(html)
#text = text.replace(',',' ')
#print(text)
| 25.407143
| 89
| 0.57661
|
b8b655968c44291cc31730f8c4358f5b89426e4d
| 7,217
|
h
|
C
|
DIR819_v1.06/src/kernel/linux-2.6.36.x/drivers/net/eip93_drivers/quickSec/src/lib/sshutil/sshnet/sshdnstransport.h
|
Sirherobrine23/Dir819gpl_code
|
8af92d65416198755974e3247b7bbe7f1151d525
|
[
"BSD-2-Clause"
] | 1
|
2022-03-19T06:38:01.000Z
|
2022-03-19T06:38:01.000Z
|
DIR819_v1.06/src/kernel/linux-2.6.36.x/drivers/net/eip93_drivers/quickSec/src/lib/sshutil/sshnet/sshdnstransport.h
|
Sirherobrine23/Dir819gpl_code
|
8af92d65416198755974e3247b7bbe7f1151d525
|
[
"BSD-2-Clause"
] | null | null | null |
DIR819_v1.06/src/kernel/linux-2.6.36.x/drivers/net/eip93_drivers/quickSec/src/lib/sshutil/sshnet/sshdnstransport.h
|
Sirherobrine23/Dir819gpl_code
|
8af92d65416198755974e3247b7bbe7f1151d525
|
[
"BSD-2-Clause"
] | 1
|
2022-03-19T06:38:03.000Z
|
2022-03-19T06:38:03.000Z
|
/*
* Author: Tero Kivinen <kivinen@iki.fi>
*
* Copyright (c) 2004 SFNT Finland Oy.
*/
/*
* Program: sshdns
*
* Creation : 13:29 Mar 18 2004 kivinen
* Last Modification : 16:03 Feb 24 2009 kivinen
* Version : 1.51
*
*
* Description : DNS Transport layer.
* This layer will send one packet using
* specified transport. It does not retransmit
* packets. It will wait reply for specified
* time and call callback when reply is received.
*
*
*/
#ifndef SSHDNSTRANSPORT_H
#define SSHDNSTRANSPORT_H
/* Transport layer context. */
typedef struct SshDNSTransportRec *SshDNSTransport;
/* Transport host context. */
typedef struct SshDNSTransportHostRec *SshDNSTransportHost;
#include "sshdnstransportimpl.h"
/* Callback to be called when the reply packet is received
in transport layer, or after the request times out. The
return_packet is only valid during this call. */
typedef void (*SshDNSTransportCallback)(SshDNSResponseCode error,
const unsigned char *return_packet,
size_t packet_length,
void *context);
/**********************************************************************/
/* Transport layer. This layer takes care of the transport
protocols TCP/UDP. There is only one transport per each
protocol allocated for given application. */
/* Allocate transport handle. This operation is normally
done only once during the initialization of the library.
The caches etc are allocated using default sizes, and
normally application will call ssh_dns_transport_configure
immediately after this to configure the caches sizes.
This will return NULL if out of memory. */
SshDNSTransport
ssh_dns_transport_allocate(const SshDNSTransportSpecStruct *specification);
/* TCP transport specification. */
extern const SshDNSTransportSpecStruct *ssh_dns_transport_spec_tcp;
/* UDP transport specification. */
extern const SshDNSTransportSpecStruct *ssh_dns_transport_spec_udp;
/* Transport configuration structure. */
typedef struct SshDNSTransportConfigRec {
SshUInt32 close_timeout_us; /* How long keep the connection open and idle
after operation in useconds. Default is
30 000 000 us = 30 seconds. */
size_t max_memory; /* Maximum number of total memory used by
transport. Default is 16 kB. This
includes memory used for host structures
and queued packets waiting to be sent.
It does not include some ADT overhead used
for internal structures. It also does
not include the memory used by the lower
layer transport hooks (tcp, udp etc). */
SshUInt32 prealloc_hosts; /* Number of hosts to preallocate.
Default is 0. */
SshUInt32 keep_hosts; /* Number of hosts to keep even when not used
(will not affect at all if smaller than
prealloc). Default is 4. */
SshUInt32 max_hosts; /* Maximum number of hosts. Default is 64. */
} *SshDNSTransportConfig, SshDNSTransportConfigStruct;
/* Reconfigure cache etc information for the transport. This
can be called at any time, and this will clear all the
caches and automatically abort all active operations
(with timeout). This returns true if the operation was
successful, and FALSE if it run out of memory during the
configure. In case of memory error some of the operations
might have been done, and some may still be using old
values. The transport will still be usable even if memory
error is received. */
Boolean
ssh_dns_transport_configure(SshDNSTransport transport,
SshDNSTransportConfig config);
/* Free transport. There MUST not be any host structures
allocated when this is called. */
void ssh_dns_transport_free(SshDNSTransport transport);
/* Allocate unique ID for the request. This will be global
to the transport protocol. */
SshUInt16 ssh_dns_transport_id(SshDNSTransport transport);
/* Free unique ID. */
void
ssh_dns_transport_id_free(SshDNSTransport transport, SshUInt16 id);
/* Register random number generator to the DNS library. By default the dns
library uses ssh_rand (which needs to be seeded externally before dns
library is used), but that is not safe enough for high security
applications. High security applications needs to initialize the
cryptolibrary and register the ssh_random_get_uint32 as random number
function to the dns library. */
void ssh_dns_transport_register_random_func(SshDNSTransport transport,
SshUInt32 (*rand_func)(void));
/* Return random number using configure random number function. */
SshUInt32 ssh_dns_transport_random_number(SshDNSTransportHost host);
/**********************************************************************/
/* Transport host layer. This is the host specific structure
allocated from the pool of host structures. The DNS
should only keep minimum amount of hosts allocated at one
time, i.e. it should free the host immediately when not
needed any more. The hosts structures are reference
counted, thus there is no need to try to combine the
hosts in the upper layer, instead allocate new host for
each packet. Even when the reference count goes to zero,
the host is not immediately freed, but only after some
time, so if the same host is needed again soon, the old
entry is reused. */
/* Fetch host entry for the pool, or if not found allocate
new one. This will allocate reference to the entry. The
port number is implicit to the transport layer, and is
not given here. This will return NULL if out of memory.
If from_ip is NULL then IP_ADDR_ANY is used. The source
port is always any port. */
SshDNSTransportHost
ssh_dns_transport_host_get(SshDNSTransport transport,
SshIpAddr from_ip,
SshIpAddr to_ip);
/* Return host back to the pool and deallocate reference. */
void
ssh_dns_transport_host_put(SshDNSTransportHost host);
/* Take a refernce to the host. */
void
ssh_dns_transport_host_lock(SshDNSTransportHost host);
/* Unlock reference. */
void
ssh_dns_transport_host_unlock(SshDNSTransportHost host);
/* Send packet using transport protocol to destination host
tied to the transport host. If no reply is received after
timeout_in_us microseconds then the operation times out.
The callback is always called (unless operation is
canceled). The first 16 bits of the packet is the DNS ID,
and it is used to tie the return packets to this reply.
Unique DNS ID is allocated with ssh_dns_transport_id
function. The ID is global to the transport protocol, and
will stay same for retransmissions to same and other
hosts. */
SshOperationHandle
ssh_dns_transport_host_send(SshDNSTransportHost host,
const unsigned char *packet,
size_t packet_length,
SshUInt32 timeout_in_us,
SshUInt32 flags,
SshDNSTransportCallback callback,
void *context);
/* Return name. This is valid as long as the host structure is valid. */
const unsigned char *ssh_dns_transport_host_name(SshDNSTransportHost host);
/* Return implementation data for the lower level transport. */
void *ssh_dns_transport_implementation_data(SshDNSTransportHost host);
#endif /* SSHDNSTRANSPORT_H */
| 39.653846
| 75
| 0.735209
|
157e5275a3c0bd4579ec55e2077e2a5c26c4b611
| 109
|
ru
|
Ruby
|
config.ru
|
davydovanton/landing-project-template
|
ea31d90c9d554bfee900c6bd182ec831e6933b3a
|
[
"MIT"
] | 30
|
2020-02-25T12:56:47.000Z
|
2021-11-11T10:18:27.000Z
|
config.ru
|
davydovanton/landing-project-template
|
ea31d90c9d554bfee900c6bd182ec831e6933b3a
|
[
"MIT"
] | 3
|
2020-04-05T15:36:41.000Z
|
2020-08-19T20:45:58.000Z
|
config.ru
|
davydovanton/landing-project-template
|
ea31d90c9d554bfee900c6bd182ec831e6933b3a
|
[
"MIT"
] | 2
|
2020-04-29T13:58:37.000Z
|
2021-08-30T12:24:47.000Z
|
# frozen_string_literal: true
require_relative './web_app'
require_relative './config/boot'
run WebApp.new
| 15.571429
| 32
| 0.788991
|
06d2dbf4dd9f7533d481998731d0dfdcfd96396d
| 24,383
|
py
|
Python
|
GPT2.py
|
wenhuchen/LogicNLG
|
e986516e5b6d310219215510b3fe1603d03215cd
|
[
"MIT"
] | 141
|
2020-04-23T03:30:16.000Z
|
2022-03-19T08:36:31.000Z
|
GPT2.py
|
wenhuchen/LogicNLG
|
e986516e5b6d310219215510b3fe1603d03215cd
|
[
"MIT"
] | 15
|
2020-04-26T07:12:30.000Z
|
2021-06-10T16:40:35.000Z
|
GPT2.py
|
wenhuchen/LogicNLG
|
e986516e5b6d310219215510b3fe1603d03215cd
|
[
"MIT"
] | 20
|
2020-04-27T03:07:10.000Z
|
2022-01-22T22:13:15.000Z
|
import argparse
import logging
from tqdm import trange
import torch
import torch.nn.functional as F
import numpy as np
from torch import nn
from torch.autograd import Variable
from transformers import GPT2Config
from transformers import GPT2LMHeadModel, GPT2Tokenizer, BertTokenizer
from DataLoader import *
from Model import BERTGen
from utils import sample_sequence
import torch.optim as optim
import math
import sys
import pandas
import os
import numpy
import nltk
from torch.utils.tensorboard import SummaryWriter
import warnings
warnings.filterwarnings("ignore", category=UserWarning)
device = torch.device('cuda')
def set_seed(args):
np.random.seed(args.seed)
torch.manual_seed(args.seed)
if args.n_gpu > 0:
torch.cuda.manual_seed_all(args.seed)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--model", default='gpt2', type=str)
parser.add_argument("--top_k", type=int, default=0)
parser.add_argument("--top_p", type=float, default=0.9)
parser.add_argument('--seed', type=int, default=42, help="random seed for initialization")
parser.add_argument('--do_train', default=False, action="store_true", help="whether to train or test the model")
parser.add_argument('--do_rl', default=False, action="store_true", help="whether to train or test the model")
parser.add_argument('--do_val', default=False, action="store_true", help="whether to train or test the model")
parser.add_argument('--do_test', default=False, action="store_true", help="whether to compute the BLEU scores on test split")
parser.add_argument('--do_test_challenge', default=False, action="store_true", help="whether to compute the BLEU scores on challenge split")
parser.add_argument('--do_ppl', default=False, action="store_true", help="whether to compute perplexity of the model")
parser.add_argument('--do_verify', default=False, action="store_true", help="whether compute the adv-acc score on test split")
parser.add_argument('--do_verify_challenge', default=False, action="store_true", help="whether compute the adv-acc score on challenge split")
parser.add_argument('--epoch', default=10, type=int, help="whether to train or test the model")
parser.add_argument('--batch_size', default=5, type=int, help="whether to train or test the model")
parser.add_argument('--learning_rate', default=2e-6, type=float, help="whether to train or test the model")
parser.add_argument('--dataset', default='table', type=str, help="whether to train or test the model")
parser.add_argument('--every', default=50, type=int, help="whether to train or test the model")
parser.add_argument('--load_from', default='', type=str, help="whether to train or test the model")
parser.add_argument('--id', default='models', type=str, help="specify the id of the experiment")
parser.add_argument('--max_len', default=800, type=int, help="whether to train or test the model")
parser.add_argument('--dim', default=768, type=int, help="whether to train or test the model")
parser.add_argument('--layers', default=3, type=int, help="whether to train or test the model")
parser.add_argument('--head', default=4, type=int, help="whether to train or test the model")
parser.add_argument("--modelpath", type=str, default="bert-base-uncased",
help="For distributed training: local_rank")
parser.add_argument('--gradient_accumulation_steps', type=int, default=5, help="accumulation steps for gradient")
parser.add_argument('--decode_first_K', type=int, default=10000, help="For debugging purpose")
args = parser.parse_args()
args.device = torch.device("cuda")
args.n_gpu = torch.cuda.device_count()
if args.model == 'gpt2-medium':
args.batch_size = 2
else:
args.batch_size = 5
if args.do_rl:
args.batch_size = 1
tokenizer = GPT2Tokenizer.from_pretrained(args.model)
model = GPT2LMHeadModel.from_pretrained(args.model)
model = nn.DataParallel(model)
model.to(args.device)
if not os.path.exists(args.id):
os.mkdir(args.id)
criterion = nn.CrossEntropyLoss(reduction='none', ignore_index=-1)
if args.do_train:
tb_writer = SummaryWriter(log_dir='tensorboard/GPT2-{}'.format(args.model))
dataset = GPTTableDatabase('data/train_lm.json', None, None, tokenizer, args.batch_size, args.max_len)
model.train()
optimizer = optim.Adam(model.parameters(), args.learning_rate)
avg_loss = 0
global_step = 0
for epoch_idx in range(args.epoch):
print("start training {}th epoch".format(epoch_idx))
dataset.shuffle()
for idx in range(0, dataset.train_len()):
batch = dataset.get_data(idx)
batch = tuple(Variable(t).to(device) for t in batch)
trg_inp, trg_out, mask, caption = batch
inputs = torch.cat([caption, trg_inp], 1)
model.zero_grad()
optimizer.zero_grad()
logits = model(inputs)[0]
logits = logits[:, -trg_out.shape[1]:, :].contiguous()
loss = criterion(logits.view(-1, logits.shape[-1]), trg_out.view(-1))
loss = loss * mask.view(-1)
loss = loss.sum() / mask.sum()
avg_loss += loss.item()
loss.backward()
optimizer.step()
global_step += 1
if idx % args.every == 0 and idx > 0:
tb_writer.add_scalar("perplexity", math.exp(avg_loss / args.every), global_step)
fake_inputs = caption
gt_inputs = trg_out.cpu().data.numpy()
#samples = model.sample(fake_inputs, tabfeat, caption, highlight_idx, bert)
samples = sample_sequence(model, 30, fake_inputs, [])
samples = samples[:, caption.shape[1]:]
samples = samples.cpu().data.numpy()
for s, gt in zip(samples, gt_inputs):
text = tokenizer.decode(s, clean_up_tokenization_spaces=True)
text = text[: text.find(tokenizer.eos_token)]
print("PREDICTION |||||| ", text)
text = tokenizer.decode(gt, clean_up_tokenization_spaces=True)
text = text[: text.find(tokenizer.eos_token)]
print("GROUNDTRUH |||||| ",text)
break
avg_loss = 0
if args.model == 'gpt2':
torch.save(model.state_dict(), '{}/GPT_ep{}.pt'.format(args.id, epoch_idx))
else:
torch.save(model.state_dict(), '{}/GPT_medium_ep{}.pt'.format(args.id, epoch_idx))
tb_writer.close()
if args.do_val:
dataset = GPTTableDatabase(None, 'data/val_lm.json', None, tokenizer, args.batch_size, args.max_len)
model.load_state_dict(torch.load(args.load_from))
model.eval()
with torch.no_grad():
losses = []
for idx in range(0, dataset.val_len()):
batch = dataset.get_data(idx, 'val')
batch = tuple(Variable(t).to(device) for t in batch)
trg_inp, trg_out, mask, caption = batch
inputs = torch.cat([caption, trg_inp], 1)
logits = model(inputs)[0]
logits = logits[:, -trg_out.shape[1]:, :].contiguous()
loss = criterion(logits.view(-1, logits.shape[-1]), trg_out.view(-1))
loss = loss * mask.view(-1)
loss = loss.sum() / mask.sum()
losses.append(loss.item())
avg_loss = sum(losses) / len(losses)
perpelexity = math.exp(avg_loss)
sys.stdout.write("validation perplexity is {} \r".format(perpelexity))
avg_loss = sum(losses) / len(losses)
perplexity = math.exp(avg_loss)
print("validation perplexity is {}".format(perplexity))
if args.do_ppl:
dataset = GPTTableDatabase(None, None, 'data/test_lm.json',
tokenizer, args.batch_size, args.max_len)
model.load_state_dict(torch.load(args.load_from))
model.eval()
with torch.no_grad():
losses = []
for idx in range(0, dataset.test_len()):
batch = dataset.get_data(idx, 'test')
batch = tuple(Variable(t).to(device) for t in batch)
trg_inp, trg_out, mask, caption = batch
inputs = torch.cat([caption, trg_inp], 1)
logits = model(inputs)[0]
logits = logits[:, -trg_out.shape[1]:, :].contiguous()
loss = criterion(logits.view(-1, logits.shape[-1]), trg_out.view(-1))
loss = loss * mask.view(-1)
loss = loss.sum() / mask.sum()
losses.append(loss.item())
avg_loss = sum(losses) / len(losses)
perplexity = math.exp(avg_loss)
print("test perplexity is {}".format(perplexity))
if args.do_test:
dataset = GPTTableDatabase(None, None, 'data/test_lm.json', tokenizer, args.batch_size, args.max_len)
model.load_state_dict(torch.load(args.load_from))
model.eval()
sent_bleus_1 = []
sent_bleus_2 = []
sent_bleus_3 = []
results = {}
with torch.no_grad():
for idx in range(0, min(args.decode_first_K, dataset.test_len())):
batch = dataset.get_data(idx, 'test')
references = dataset.get_reference(idx, 'test')
table_id = dataset.get_table_id(idx, 'test')
results[table_id] = []
batch = tuple(Variable(t).to(device) for t in batch)
trg_inp, trg_out, mask, caption = batch
fake_inputs = caption
samples = sample_sequence(model, 30, fake_inputs, [], top_k=1)
samples = samples[:, caption.shape[1]:]
samples = samples.cpu().data.numpy()
for s in samples:
text = tokenizer.decode(s, clean_up_tokenization_spaces=True)
text = text[: text.find(tokenizer.eos_token)]
results[table_id].append(text)
hypothesis = text.lower().split(' ')
sent_bleus_1.append(nltk.translate.bleu_score.sentence_bleu(
references, hypothesis, weights=(1, 0, 0)))
sent_bleus_2.append(nltk.translate.bleu_score.sentence_bleu(
references, hypothesis, weights=(0.5, 0.5, 0)))
sent_bleus_3.append(nltk.translate.bleu_score.sentence_bleu(
references, hypothesis, weights=(0.33, 0.33, 0.33)))
bleu_1 = format((sum(sent_bleus_1) / len(sent_bleus_1) * 100), '.2f')
bleu_2 = format((sum(sent_bleus_2) / len(sent_bleus_2) * 100), '.2f')
bleu_3 = format((sum(sent_bleus_3) / len(sent_bleus_3) * 100), '.2f')
sys.stdout.write("finished {}/{} BLEU score {}/{}/{} \r".format(idx, dataset.test_len(), bleu_1, bleu_2, bleu_3))
print("total corpus BLEU score = {}/{}/{}".format(bleu_1, bleu_2, bleu_3))
with open('outputs/GPT_{}_{}.json'.format(args.model, bleu_3), 'w') as f:
json.dump(results, f, indent=2)
if args.do_test_challenge:
dataset = GPTTableDatabase(None, None, 'challenge/blind_test_lm_inputs.json', tokenizer, args.batch_size, args.max_len)
model.load_state_dict(torch.load(args.load_from))
model.eval()
results = {}
with torch.no_grad():
for idx in range(0, min(args.decode_first_K, dataset.test_len())):
batch = dataset.get_data(idx, 'test')
references = dataset.get_reference(idx, 'test')
table_id = dataset.get_table_id(idx, 'test')
results[table_id] = []
batch = tuple(Variable(t).to(device) for t in batch)
trg_inp, trg_out, mask, caption = batch
fake_inputs = caption
samples = sample_sequence(model, 30, fake_inputs, [], top_k=1)
samples = samples[:, caption.shape[1]:]
samples = samples.cpu().data.numpy()
for s in samples:
text = tokenizer.decode(s, clean_up_tokenization_spaces=True)
text = text[: text.find(tokenizer.eos_token)]
results[table_id].append(text)
sys.stdout.write("finished {}/{}; speed={}s/sent \r".format(idx,
dataset.test_len(), (time.time() - start_time) / len(results)))
with open('challenge/test_results.json', 'w') as f:
json.dump(results, f, indent=2)
if args.do_verify:
dataset = GPTTableDatabase(None, None, 'data/test_lm_pos_neg.json', tokenizer, args.batch_size, args.max_len)
model.load_state_dict(torch.load(args.load_from))
model.eval()
correct, total = 0, 0
with torch.no_grad():
for idx in range(0, dataset.test_len()):
batch_pos, batch_neg = dataset.get_pair_data(idx, 'test')
batch = tuple(Variable(t).to(device) for t in batch_pos)
trg_inp, trg_out, mask, caption = batch
inputs = torch.cat([caption, trg_inp], 1)
logits = model(inputs)[0]
logits = logits[:, -trg_out.shape[1]:, :].contiguous()
loss = criterion(logits.view(-1, logits.shape[-1]), trg_out.view(-1))
loss = loss.reshape(logits.shape[0], -1)
loss_per_instance = (loss * mask).sum(1) / mask.sum(1)
pos_perpelexity_per_instance = torch.exp(loss_per_instance.cpu().data)
batch = tuple(Variable(t).to(device) for t in batch_neg)
trg_inp, trg_out, mask, caption = batch
inputs = torch.cat([caption, trg_inp], 1)
logits = model(inputs)[0]
logits = logits[:, -trg_out.shape[1]:, :].contiguous()
loss = criterion(logits.view(-1, logits.shape[-1]), trg_out.view(-1))
loss = loss.reshape(logits.shape[0], -1)
loss_per_instance = (loss * mask).sum(1) / mask.sum(1)
neg_perpelexity_per_instance = torch.exp(loss_per_instance.cpu().data)
comparison = (pos_perpelexity_per_instance < neg_perpelexity_per_instance).float()
correct += comparison.sum(-1).item()
total += comparison.shape[0]
sys.stdout.write('finished {}/{} accuracy {} \r'.format(idx, dataset.test_len(), correct / total))
print('total accuracy = {}'.format(correct / total))
if args.do_verify_challenge:
dataset = GPTTableDatabase(None, None, 'challenge/blind_test_lm_pos_neg.json', tokenizer, args.batch_size, args.max_len)
model.load_state_dict(torch.load(args.load_from))
model.eval()
correct, total = 0, 0
results = {}
with torch.no_grad():
for idx in range(0, dataset.test_len()):
batch_pos, batch_neg = dataset.get_pair_data(idx, 'test')
table_name = dataset.get_item(idx, 'test')
results[table_name] = []
batch = tuple(Variable(t).to(device) for t in batch_pos)
trg_inp, trg_out, mask, caption = batch
inputs = torch.cat([caption, trg_inp], 1)
logits = model(inputs)[0]
logits = logits[:, -trg_out.shape[1]:, :].contiguous()
loss = criterion(logits.view(-1, logits.shape[-1]), trg_out.view(-1))
loss = loss.reshape(logits.shape[0], -1)
loss_per_instance = (loss * mask).sum(1) / mask.sum(1)
pos_perpelexity_per_instance = torch.exp(loss_per_instance.cpu().data).tolist()
batch = tuple(Variable(t).to(device) for t in batch_neg)
trg_inp, trg_out, mask, caption = batch
inputs = torch.cat([caption, trg_inp], 1)
logits = model(inputs)[0]
logits = logits[:, -trg_out.shape[1]:, :].contiguous()
loss = criterion(logits.view(-1, logits.shape[-1]), trg_out.view(-1))
loss = loss.reshape(logits.shape[0], -1)
loss_per_instance = (loss * mask).sum(1) / mask.sum(1)
neg_perpelexity_per_instance = torch.exp(loss_per_instance.cpu().data).tolist()
for p1, p2 in zip(pos_perpelexity_per_instance, neg_perpelexity_per_instance):
if p1 < p2:
results[table_name].append('unknown1')
else:
results[table_name].append('unknown2')
sys.stdout.write('finished {}/{}\r'.format(idx, dataset.test_len()))
with open('challenge/verify_results.json', 'w') as f:
json.dump(results, f, indent=2)
if args.do_rl:
def assemble_distribute(GPT_tokens, rewards, tokenizer, bert_tokenizer):
GPT_tokens = tokenizer.convert_ids_to_tokens(GPT_tokens)
gpt_mapping = []
count = 0
for i, x in enumerate(GPT_tokens):
if x[0] == '\u0120' or i == 0:
gpt_mapping.append(count)
count += 1
else:
count -= 1
gpt_mapping.append(count)
count += 1
sentence = tokenizer.convert_tokens_to_string(GPT_tokens)
ids = bert_tokenizer.tokenize(sentence)
bert_mapping = []
count = 0
for i, x in enumerate(ids):
if x.startswith('##'):
count -= 1
bert_mapping.append(count)
count += 1
else:
bert_mapping.append(count)
count += 1
# start calculating rewards
sent_rewards = []
tmp = []
for i, x in enumerate(bert_mapping):
if i > 0 and x != bert_mapping[i - 1]:
sent_rewards.append(sum(tmp) / len(tmp))
tmp = [rewards[i]]
else:
tmp.append(rewards[i])
sent_rewards.append(sum(tmp) / len(tmp))
token_rewards = []
for _ in gpt_mapping:
token_rewards.append(sent_rewards[_])
return token_rewards
model.load_state_dict(torch.load(args.load_from))
print("loading from {}".format(args.load_from))
model.train()
bert_tokenizer = BertTokenizer.from_pretrained(args.modelpath)
scorer = BERTGen(bert_tokenizer.vocab_size, args.dim, args.layers, args.head, args.modelpath)
scorer.to(args.device)
scorer.load_state_dict(torch.load('models/BERT_scorer_ep9.pt'))
scorer.eval()
optimizer = optim.Adam(model.parameters(), 5e-7)
avg_loss = 0
for epoch_idx in range(args.epoch):
print("start training {}th epoch".format(epoch_idx))
dataset.shuffle()
for idx in range(0, dataset.train_len()):
batch = dataset.get_data(idx, details=True)
table, sub_columns, title = batch[4:]
batch = tuple(Variable(t).to(device) for t in batch[:4])
trg_inp, trg_out, mask, caption = batch
if (idx + 1) % 2 == 0:
# Do RL training
samples = sample_sequence(model, 30, caption, [])
samples = samples[:, caption.shape[1]:][0]
samples = samples.cpu().data.numpy()
end = numpy.where(samples == tokenizer.eos_token_id)[0]
if len(end) > 0:
samples = samples[:end[0]]
sentence = tokenizer.decode(samples)
e_tokens = bert_tokenizer.tokenize(sentence)
desc = linearize_table(table, sub_columns[0], title[0], bert_tokenizer)
e_idx = bert_tokenizer.convert_tokens_to_ids(e_tokens)
inputs = []
outputs = []
for i in range(len(e_tokens)):
inputs.append(bert_tokenizer.convert_tokens_to_ids(
e_tokens[:i] + ['[MASK]'] + e_tokens[i + 1:]))
outputs.append([-1] * i + [e_idx[i]] + [-1] * (len(e_tokens) - i - 1))
desc = torch.LongTensor(desc).unsqueeze(0).to(device)
inputs = torch.LongTensor(inputs).to(device)
outputs = torch.LongTensor(outputs).to(device)
with torch.no_grad():
logits = scorer(inputs, desc)
loss = criterion(logits.view(-1, logits.shape[-1]),
outputs.view(-1)).view(logits.shape[0], -1).cpu().data
prob = torch.exp(-loss)
indexes = torch.arange(0, logits.shape[0])[:, None].long()
probs = torch.gather(prob, -1, indexes)
rewards = assemble_distribute(samples, probs.view(-1).numpy(), tokenizer, bert_tokenizer)
rewards = torch.FloatTensor(rewards).to(args.device)
#rewards = torch.cat([rewards, torch.FloatTensor([1.0]).to(device)], 0)
#rewards = rewards - torch.mean(rewards)
rewards = Variable(rewards)
samples = torch.from_numpy(samples).to(args.device).unsqueeze(0)
# samples = torch.cat([samples, torch.LongTensor(
# [tokenizer.eos_token_id]).to(device)], 0).unsqueeze(0)
samples = Variable(samples)
inputs = torch.cat([caption, samples], 1)
logits = model(inputs)[0]
logits = logits[:, -samples.shape[1] - 1:-1, :].contiguous()
loss = criterion(logits.view(-1, logits.shape[-1]), samples.view(-1))
loss = (loss * rewards).mean()
else:
# Do MLE training
inputs = torch.cat([caption, trg_inp], 1)
logits = model(inputs)[0]
logits = logits[:, -trg_out.shape[1]:, :].contiguous()
loss = criterion(logits.view(-1, logits.shape[-1]), trg_out.view(-1))
loss = loss * mask.view(-1)
loss = loss.sum() / mask.sum()
avg_loss += loss.item()
if (idx + 1) % args.gradient_accumulation_steps == 0:
loss.backward()
optimizer.step()
model.zero_grad()
optimizer.zero_grad()
if (idx + 1) % args.every == 0:
#sys.stdout.write('finished {} samples loss = {} \r'.format(idx, avg_loss / 50))
print('finished {} samples loss = {}, perpelexity = {}'.format(
idx, avg_loss / args.every, math.exp(avg_loss / args.every)))
fake_inputs = caption
gt_inputs = trg_out.cpu().data.numpy()
#samples = model.sample(fake_inputs, tabfeat, caption, highlight_idx, bert)
samples = sample_sequence(model, 30, fake_inputs, [])
samples = samples[:, caption.shape[1]:]
samples = samples.cpu().data.numpy()
for s, gt in zip(samples, gt_inputs):
text = tokenizer.decode(s, clean_up_tokenization_spaces=True)
text = text[: text.find(tokenizer.eos_token)]
print(text)
break
avg_loss = 0
torch.save(model.state_dict(), '{}/GPT_RL_ep{}.pt'.format(args.id, epoch_idx))
| 44.575868
| 145
| 0.557725
|
ff497d7c839a46ddc9d1b2e37453ea4aee8683e0
| 949
|
py
|
Python
|
tests/b901.py
|
admdev8/flake8-bugbear
|
55533c95c6f30d8f88db847fb3f01d7ddb57b280
|
[
"MIT"
] | 1
|
2020-09-04T17:13:23.000Z
|
2020-09-04T17:13:23.000Z
|
tests/b901.py
|
admdev8/flake8-bugbear
|
55533c95c6f30d8f88db847fb3f01d7ddb57b280
|
[
"MIT"
] | 4
|
2020-09-04T17:13:36.000Z
|
2020-09-04T17:32:57.000Z
|
tests/b901.py
|
admdev8/flake8-bugbear
|
55533c95c6f30d8f88db847fb3f01d7ddb57b280
|
[
"MIT"
] | null | null | null |
"""
Should emit:
B901 - on lines 9, 36
"""
def broken():
if True:
return [1, 2, 3]
yield 3
yield 2
yield 1
def not_broken():
if True:
return
yield 3
yield 2
yield 1
def not_broken2():
return not_broken()
def not_broken3():
return
yield from not_broken()
def broken2():
return [3, 2, 1]
yield from not_broken()
async def not_broken4():
import asyncio
await asyncio.sleep(1)
return 1
def actually_not_broken():
yield 2
return 1 # noqa
def not_broken5():
def inner():
return 2
yield inner()
def not_broken6():
return (yield from [])
def not_broken7():
x = yield from []
return x
def not_broken8():
x = None
def inner(ex):
nonlocal x
x = ex
inner((yield from []))
return x
class NotBroken9(object):
def __await__(self):
yield from function()
return 42
| 11.297619
| 29
| 0.563751
|
e08ffa63a6fdb7a958e8ddbd52da7da246f04b6c
| 2,381
|
h
|
C
|
deps/museum/5.0.0/bionic/libc/kernel/uapi/linux/fcntl.h
|
simpleton/profilo
|
91ef4ba1a8316bad2b3080210316dfef4761e180
|
[
"Apache-2.0"
] | null | null | null |
deps/museum/5.0.0/bionic/libc/kernel/uapi/linux/fcntl.h
|
simpleton/profilo
|
91ef4ba1a8316bad2b3080210316dfef4761e180
|
[
"Apache-2.0"
] | null | null | null |
deps/museum/5.0.0/bionic/libc/kernel/uapi/linux/fcntl.h
|
simpleton/profilo
|
91ef4ba1a8316bad2b3080210316dfef4761e180
|
[
"Apache-2.0"
] | null | null | null |
/****************************************************************************
****************************************************************************
***
*** This header was automatically generated from a Linux kernel header
*** of the same name, to make information necessary for userspace to
*** call into the kernel available to libc. It contains only constants,
*** structures, and macros generated from the original header, and thus,
*** contains no copyrightable information.
***
*** To edit the content of this header, modify the corresponding
*** source file (e.g. under external/kernel-headers/original/) then
*** run bionic/libc/kernel/tools/update_all.py
***
*** Any manual change here will be lost the next time this script will
*** be run. You've been warned!
***
****************************************************************************
****************************************************************************/
#ifndef _UAPI_LINUX_FCNTL_H
#define _UAPI_LINUX_FCNTL_H
#define UAPI_LINUX_FCNTL_H
#define UAPI_LINUX_FCNTL_H_
#define _LINUX_FCNTL_H
#define _LINUX_FCNTL_H_
#define _UAPI_LINUX_FCNTL_H_
#include <museum/5.0.0/bionic/libc/asm/fcntl.h>
#define F_SETLEASE (F_LINUX_SPECIFIC_BASE + 0)
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define F_GETLEASE (F_LINUX_SPECIFIC_BASE + 1)
#define F_CANCELLK (F_LINUX_SPECIFIC_BASE + 5)
#define F_DUPFD_CLOEXEC (F_LINUX_SPECIFIC_BASE + 6)
#define F_NOTIFY (F_LINUX_SPECIFIC_BASE+2)
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define F_SETPIPE_SZ (F_LINUX_SPECIFIC_BASE + 7)
#define F_GETPIPE_SZ (F_LINUX_SPECIFIC_BASE + 8)
#define DN_ACCESS 0x00000001
#define DN_MODIFY 0x00000002
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define DN_CREATE 0x00000004
#define DN_DELETE 0x00000008
#define DN_RENAME 0x00000010
#define DN_ATTRIB 0x00000020
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define DN_MULTISHOT 0x80000000
#define AT_FDCWD -100
#define AT_SYMLINK_NOFOLLOW 0x100
#define AT_REMOVEDIR 0x200
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
#define AT_SYMLINK_FOLLOW 0x400
#define AT_NO_AUTOMOUNT 0x800
#define AT_EMPTY_PATH 0x1000
#endif
/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
| 44.092593
| 78
| 0.669887
|
2dfa5e99d6af91cd6b7786fda111be2f6259db35
| 26,539
|
cc
|
C++
|
tensorflow/core/grappler/optimizers/dependency_optimizer.cc
|
aaniin/tensorflow
|
fc8b900e03cd20d3af30ea718bc48b18a10f124d
|
[
"Apache-2.0"
] | 1
|
2021-05-21T14:59:46.000Z
|
2021-05-21T14:59:46.000Z
|
tensorflow/core/grappler/optimizers/dependency_optimizer.cc
|
shekharpalit/tensorflow
|
6aa83398ab03bfae822f36772757097bcb98b6ed
|
[
"Apache-2.0"
] | 1
|
2019-02-22T00:50:13.000Z
|
2019-02-22T00:50:13.000Z
|
tensorflow/core/grappler/optimizers/dependency_optimizer.cc
|
shekharpalit/tensorflow
|
6aa83398ab03bfae822f36772757097bcb98b6ed
|
[
"Apache-2.0"
] | 1
|
2021-05-21T15:00:04.000Z
|
2021-05-21T15:00:04.000Z
|
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/core/grappler/optimizers/dependency_optimizer.h"
#include "absl/container/flat_hash_map.h"
#include "tensorflow/core/framework/node_def.pb.h"
#include "tensorflow/core/framework/op.h"
#include "tensorflow/core/graph/tensor_id.h"
#include "tensorflow/core/grappler/costs/graph_properties.h"
#include "tensorflow/core/grappler/grappler_item.h"
#include "tensorflow/core/grappler/mutable_graph_view.h"
#include "tensorflow/core/grappler/op_types.h"
#include "tensorflow/core/grappler/optimizers/constant_folding.h"
#include "tensorflow/core/grappler/utils.h"
#include "tensorflow/core/grappler/utils/topological_sort.h"
#include "tensorflow/core/lib/core/errors.h"
#include "tensorflow/core/lib/core/stringpiece.h"
#include "tensorflow/core/lib/gtl/inlined_vector.h"
#include "tensorflow/core/lib/strings/str_util.h"
#include "tensorflow/core/lib/strings/strcat.h"
#include "tensorflow/core/util/device_name_utils.h"
namespace tensorflow {
namespace grappler {
namespace {
// Builds a map from the &graph->node(i) to i.
absl::flat_hash_map<const NodeDef*, int> BuildNodeToIdx(const GraphDef& graph) {
// Set up &node -> index map.
absl::flat_hash_map<const NodeDef*, int> node_to_idx;
for (int i = 0; i < graph.node_size(); ++i) {
const NodeDef& node = graph.node(i);
node_to_idx[&node] = i;
}
return node_to_idx;
}
} // namespace
bool DependencyOptimizer::SafeToRemoveIdentity(const NodeDef& node) const {
if (!IsIdentity(node) && !IsIdentityN(node)) {
return true;
}
if (nodes_to_preserve_.find(node.name()) != nodes_to_preserve_.end()) {
return false;
}
if (!fetch_nodes_known_) {
// The output values of this node may be needed.
return false;
}
MutableGraphView::OutputPort port = graph_view_->GetRegularFanin(
MutableGraphView::InputPort(const_cast<NodeDef*>(&node), 0));
NodeDef* input = port.node;
CHECK(input != nullptr) << "node = " << node.name()
<< " input = " << node.input(0);
// Don't remove Identity nodes corresponding to Variable reads or following
// Recv.
if (IsVariable(*input) || IsRecv(*input)) {
return false;
} else if (IsSwitch(*input)) {
// Don't turn Identity nodes following Switch into NoOp or remove them
// if it requires anchoring a control dependencies to the Switch node, which
// is not valid.
MutableGraphView::OutputPort control_port(const_cast<NodeDef*>(&node),
Graph::kControlSlot);
auto control_fanouts = graph_view_->GetFanout(control_port);
if (!control_fanouts.empty()) {
return false;
}
}
bool node_has_multiple_inputs =
graph_view_->NumFanins(node, /*include_controlling_nodes=*/true) > 1;
auto fanouts =
graph_view_->GetFanouts(node, /*include_controlled_nodes=*/true);
for (auto fanout : fanouts) {
if (node_has_multiple_inputs && IsMerge(*fanout.node)) {
return false;
}
if (IsSwitch(*input)) {
if (graph_view_->HasFanin(*fanout.node,
{node.name(), Graph::kControlSlot})) {
return false;
}
}
}
return true;
}
bool DependencyOptimizer::SafeToConvertToNoOp(const NodeDef& node) const {
if (!fetch_nodes_known_ ||
nodes_to_preserve_.find(node.name()) != nodes_to_preserve_.end()) {
return false;
}
if (IsMerge(node) || IsSwitch(node) || ModifiesFrameInfo(node) ||
!IsFreeOfSideEffect(node)) {
return false;
}
if (node.op().rfind("Submodel", 0) == 0) {
return false;
}
const OpDef* op_def = nullptr;
Status status = OpRegistry::Global()->LookUpOpDef(node.op(), &op_def);
if (!status.ok() || op_def->output_arg_size() == 0) {
return false;
}
const absl::flat_hash_set<string> do_not_rewrite_ops{
"Assert", "CheckNumerics", "_Retval",
"_Arg", "_ParallelConcatUpdate", "TPUExecute",
"TPUCompile", "ControlTrigger"};
if (do_not_rewrite_ops.find(node.op()) != do_not_rewrite_ops.end()) {
return false;
}
if (!SafeToRemoveIdentity(node)) {
return false;
}
if (graph_view_->NumFanouts(node, /*include_controlled_nodes=*/false) > 0) {
// The output values of this node may be needed.
return false;
}
return true;
}
// Computes how many edges the graph would contain around this node's
// neighborhood if the node were bypassed (fanins rerouted to fanouts).
// Used by BypassingNodeIsBeneficial to reject rewrites that grow the graph.
int DependencyOptimizer::NumEdgesIfBypassed(
    const NodeDef& node, int num_controlling_fanins,
    const absl::flat_hash_set<MutableGraphView::Edge>& fanin_edges,
    const absl::flat_hash_set<MutableGraphView::Edge>& fanout_edges,
    int num_unique_fanout_nodes) const {
  const int total_fanins = fanin_edges.size();
  const bool multi_input_identity_n =
      IsIdentityN(node) && !IsIdentityNSingleInput(node);
  if (!multi_input_identity_n) {
    // Simple case: every fanin would be connected to every distinct consumer.
    return total_fanins * num_unique_fanout_nodes;
  }
  // Multi-input IdentityN with input/output control dependencies will likely
  // increase the number of edges after optimization, so count precisely:
  //   - each regular fanin contributes one edge,
  //   - each controlling fanin is fanned out to every unique consumer,
  //   - each control fanout edge pulls in all fanins, while a regular fanout
  //     edge contributes a single edge.
  const int regular_fanins = total_fanins - num_controlling_fanins;
  int edge_count =
      regular_fanins + num_controlling_fanins * num_unique_fanout_nodes;
  for (const auto& fanout_edge : fanout_edges) {
    edge_count += (fanout_edge.dst.port_id == Graph::kControlSlot)
                      ? total_fanins
                      : 1;
  }
  return edge_count;
}
// Decides whether rerouting `node`'s fanins directly to its fanouts (and
// eventually removing the node) is expected to be profitable. The rewrite is
// rejected when it would increase the edge count or add edges that cross
// device boundaries (which imply costly Send/Recv communication).
bool DependencyOptimizer::BypassingNodeIsBeneficial(
    const NodeDef& node, int num_controlling_fanins,
    const absl::flat_hash_set<MutableGraphView::Edge>& fanin_edges,
    const absl::flat_hash_set<MutableGraphView::Edge>& fanout_edges) const {
  const bool is_identity = IsIdentity(node) || IsIdentityNSingleInput(node);
  const bool is_multi_input_identity_n =
      IsIdentityN(node) && !IsIdentityNSingleInput(node);
  const int num_fanins = fanin_edges.size();
  // Multiple fanout edges may point at the same consumer node; count each
  // consumer node only once.
  absl::flat_hash_set<NodeDef*> unique_fanout_nodes;
  for (const auto& fanout_edge : fanout_edges) {
    unique_fanout_nodes.insert(fanout_edge.dst.node);
  }
  const int num_unique_fanout_nodes = unique_fanout_nodes.size();
  // Reject if bypassing would strictly increase the number of edges.
  if (NumEdgesIfBypassed(node, num_controlling_fanins, fanin_edges,
                         fanout_edges, num_unique_fanout_nodes) >
      num_fanins + num_unique_fanout_nodes) {
    return false;
  }
  // Make sure that we don't increase the number of edges that cross
  // device boundaries.
  if ((num_fanins == 1 && num_unique_fanout_nodes > 1 &&
       fanin_edges.begin()->src.node->device() != node.device()) ||
      (num_fanins > 1 && num_unique_fanout_nodes == 1 &&
       fanout_edges.begin()->dst.node->device() != node.device())) {
    return false;
  }
  // TODO(rmlarsen): Not all device crossings are equally expensive.
  // Assign a cost to each based on device affinity and compute a
  // cost before and after.
  const string& node_dev = node.device();
  // Count device crossings on the incoming and outgoing sides today.
  int num_cross_in = 0;
  for (const auto& fanin : fanin_edges) {
    num_cross_in += static_cast<int>(fanin.src.node->device() != node_dev);
  }
  int num_cross_out = 0;
  for (const auto& fanout : unique_fanout_nodes) {
    num_cross_out += static_cast<int>(fanout->device() != node_dev);
  }
  // Make sure we do not increase the number of device crossings.
  const int num_cross_before = num_cross_in + num_cross_out;
  // After bypassing, every (fanin, consumer) pair becomes a direct edge;
  // count how many of those would cross devices.
  int num_cross_after = 0;
  for (const auto& fanin : fanin_edges) {
    for (const auto& fanout : unique_fanout_nodes) {
      num_cross_after +=
          static_cast<int>(fanin.src.node->device() != fanout->device());
    }
  }
  if (num_cross_after > num_cross_before) {
    return false;
  }
  if ((is_identity || is_multi_input_identity_n) && num_cross_in > 0 &&
      num_cross_out > 0 && num_cross_after > 0) {
    // This identity node follows a device crossing, so it might be
    // following a _Recv node after partitioning. Do not remove such nodes,
    // unless they only have consumers on the same device as themselves.
    return false;
  }
  return true;
}
// Attempts to simplify the node named `node_name`. Three rewrites are tried:
//   (1) Constant with no fanins: drop all of its outgoing control edges and
//       possibly mark it for deletion.
//   (2) Node whose regular outputs are provably unused: convert it to a NoOp,
//       turning its regular fanins into control dependencies.
//   (3) NoOp / Identity(N): bypass the node by rerouting fanins to fanouts
//       when BypassingNodeIsBeneficial approves, then possibly delete it.
// Neighbors affected by a rewrite are pushed onto `nodes_to_simplify` for
// reprocessing; removable nodes are recorded in `nodes_to_delete`.
Status DependencyOptimizer::OptimizeNode(
    const string& node_name, SetVector<string>* nodes_to_simplify,
    absl::flat_hash_set<string>* nodes_to_delete) {
  NodeDef* node = graph_view_->GetNode(node_name);
  const bool is_noop = IsNoOp(*node);
  const bool is_identity = IsIdentity(*node) || IsIdentityNSingleInput(*node);
  const bool is_multi_input_identity =
      IsIdentityN(*node) && !IsIdentityNSingleInput(*node);
  // WARNING: This is a strong assumption based on the executor behavior that
  // constant nodes with no input control dependency are always executed early.
  // In this case we then can prune all their output control dependencies.
  if (IsConstant(*node) &&
      graph_view_->NumFanins(*node, /*include_controlling_nodes=*/true) == 0) {
    MutableGraphView::OutputPort control_port(node, Graph::kControlSlot);
    auto control_fanouts = graph_view_->GetFanout(control_port);
    for (const auto& fanout : control_fanouts) {
      TF_RETURN_IF_ERROR(
          graph_view_->RemoveControllingFanin(fanout.node->name(), node_name));
      // The consumer lost a control input and may now be simplifiable.
      nodes_to_simplify->PushBack(fanout.node->name());
    }
    if (graph_view_->NumFanouts(*node, /*include_controlled_nodes=*/true) ==
        0 &&
        fetch_nodes_known_ &&
        nodes_to_preserve_.find(node_name) == nodes_to_preserve_.end()) {
      // Mark the node for deletion.
      nodes_to_delete->insert(node_name);
    }
    return Status::OK();
  }
  // Change ops that only have control dependencies as outputs to NoOps.
  if (!is_noop && SafeToConvertToNoOp(*node)) {
    VLOG(1) << "***** Replacing " << node_name << " (" << node->op()
            << ") with NoOp.";
    // The outputs of this node are not consumed. Replace its inputs with
    // control dependencies and replace the op itself with the NoOp op.
    const int num_regular_fanins =
        graph_view_->NumFanins(*node, /*include_controlling_nodes=*/false);
    // Remember the regular fanin node names before they are converted, so the
    // producers can be revisited afterwards.
    absl::flat_hash_set<string> regular_fanin_names;
    for (int i = 0; i < num_regular_fanins; ++i) {
      regular_fanin_names.emplace(ParseTensorName(node->input(i)).node());
    }
    TF_RETURN_IF_ERROR(
        graph_view_->UpdateAllRegularFaninsToControlling(node_name));
    TF_RETURN_IF_ERROR(
        graph_view_->UpdateNode(node_name, "NoOp", node->device(), {}));
    for (const string& regular_fanin_name : regular_fanin_names) {
      nodes_to_simplify->PushBack(regular_fanin_name);
    }
    // The node itself is now a NoOp and is a candidate for bypassing below.
    nodes_to_simplify->PushBack(node_name);
    return Status::OK();
  }
  // Remove NoOp nodes if the product of their fan-in and fan-out is less than
  // or equal to the sum of the fan-in and fan-out. The non-trivial rewrites
  // take the following form:
  //
  // Case a)
  //    x --^> +------+                x --^> +---+
  //    y --^> | NoOp | --^> a   ==>   y --^> | a |
  //    ...    |      |                 ...   |   |
  //    z --^> +------+                z --^> +---+
  //
  // Case b)
  //           +------+ --^> a         +---+ --^> a
  //    x --^> | NoOp | --^> b  ==>    | x | --^> b
  //           |      | ...            |   | ...
  //           +------+ --^> c         +---+ --^> c
  // Case c)
  //           +------+                x ---^> a
  //    x --^> | NoOp | --^> a  ==>      \/
  //    y --^> |      | --^> b           /\
  //           +------+                y ---^> b
  //
  // We only apply this optimization if we don't increase the number of control
  // edges across device boundaries, e.g. in cases a) and b) if NoOp and
  // a and x, respectively, are on the same device. Control edges across device
  // boundaries require inter-device communication (Send/Recv pairs to be
  // inserted in the graph), which is very costly.
  //
  // We also remove identity nodes, subject to the same constraints on number of
  // resulting control edges and device boundary crossings:
  //
  // Case a)
  //          +----------+ ---> a       +---+ ---> a
  //    x --> | Identity | --^> b  ==>  | x | --^> b
  //          |          | ...          |   | ...
  //          +----------+ --^> c       +---+ --^> c
  //
  // Case b)
  //    x ---> +----------+ ---> a      x ---> +---+
  //    y --^> | Identity |        ==>  y --^> | a |
  //    ...    |          |              ...   |   |
  //    z --^> +----------+             z --^> +---+
  //
  // Case c)
  //           +----------+             x ---> +---+
  //    x ---> | Identity | ---> a ==>   \--^> | a |
  //    y --^> |          | --^> b       /\    +---+
  //           +----------+             y --^> b
  if (is_noop || ((is_identity || is_multi_input_identity) &&
                  SafeToRemoveIdentity(*node))) {
    auto fanin_edges =
        graph_view_->GetFaninEdges(*node, /*include_controlling_edges=*/true);
    // Collect the controlling fanin nodes; they must be forwarded to every
    // rerouted consumer below.
    std::vector<NodeDef*> controlling_fanins;
    controlling_fanins.reserve(fanin_edges.size());
    for (const auto& fanin_edge : fanin_edges) {
      if (fanin_edge.src.port_id == Graph::kControlSlot) {
        controlling_fanins.push_back(fanin_edge.src.node);
      }
    }
    auto fanout_edges =
        graph_view_->GetFanoutEdges(*node, /*include_controlled_edges=*/true);
    if (!BypassingNodeIsBeneficial(*node, controlling_fanins.size(),
                                   fanin_edges, fanout_edges)) {
      return Status::OK();
    }
    VLOG(1) << "***** Rerouting input around\n" << node->DebugString();
    absl::flat_hash_set<NodeDef*> processed_nodes;
    for (const auto& fanout_edge : fanout_edges) {
      NodeDef* consumer = fanout_edge.dst.node;
      const int src_port = fanout_edge.src.port_id;
      if ((is_identity && src_port == 0) ||
          (is_multi_input_identity && src_port > Graph::kControlSlot)) {
        // Identity regular fanins.
        const string& input_to_forwards = node->input(src_port);
        TF_RETURN_IF_ERROR(graph_view_->UpdateRegularFaninByPort(
            consumer->name(), fanout_edge.dst.port_id,
            ParseTensorName(input_to_forwards)));
      } else if (is_identity || is_multi_input_identity) {
        // Identity control dependency.
        // TODO(lyandy): Handle IdentityN properly here by adding all regular
        // fanins as controlling fanins.
        const string& node_first_input = node->input(0);
        TF_RETURN_IF_ERROR(graph_view_->UpdateFanin(
            consumer->name(), {node_name, Graph::kControlSlot},
            {ParseTensorName(node_first_input).node(), Graph::kControlSlot}));
      } else {
        // NoOp.
        TF_RETURN_IF_ERROR(
            graph_view_->RemoveControllingFanin(consumer->name(), node_name));
      }
      processed_nodes.insert(consumer);
      nodes_to_simplify->PushBack(consumer->name());
    }
    for (const auto& processed_node : processed_nodes) {
      // Forward dependency from input to consumer if it doesn't already
      // depend on it.
      for (const auto& controlling_fanin : controlling_fanins) {
        TF_RETURN_IF_ERROR(graph_view_->AddControllingFanin(
            processed_node->name(),
            {controlling_fanin->name(), Graph::kControlSlot}));
        nodes_to_simplify->PushBack(controlling_fanin->name());
      }
    }
    if (fetch_nodes_known_ &&
        nodes_to_preserve_.find(node_name) == nodes_to_preserve_.end()) {
      // Disconnect the node from its inputs to enable further optimizations.
      TF_RETURN_IF_ERROR(graph_view_->RemoveAllFanins(
          node_name, /*keep_controlling_fanins=*/false));
      // Mark the node for deletion.
      nodes_to_delete->insert(node_name);
    }
  }
  return Status::OK();
}
// Main simplification loop: seeds a worklist with candidate nodes (NoOp,
// Identity(N), Constant, or convertible-to-NoOp), then repeatedly runs
// OptimizeNode until the worklist drains. Nodes marked for deletion are
// removed at the end, but only when the fetch set is known.
Status DependencyOptimizer::OptimizeDependencies() {
  SetVector<string> nodes_to_simplify;
  absl::flat_hash_set<string> nodes_to_delete;
  for (int i = 0; i < graph_view_->graph()->node_size(); ++i) {
    const NodeDef& node = graph_view_->graph()->node(i);
    if (IsNoOp(node) || IsIdentity(node) || IsIdentityN(node) ||
        IsConstant(node) || SafeToConvertToNoOp(node)) {
      nodes_to_simplify.PushBack(node.name());
    }
  }
  while (!nodes_to_simplify.Empty()) {
    string node_to_simplify = nodes_to_simplify.PopBack();
    // Discard nodes that were marked for deletion already.
    // NOTE(review): this inner loop assumes a non-deleted entry remains;
    // calling PopBack on an exhausted SetVector would be invalid — confirm
    // that deleted nodes can never be the last entries in the worklist.
    while (nodes_to_delete.find(node_to_simplify) != nodes_to_delete.end()) {
      node_to_simplify = nodes_to_simplify.PopBack();
    }
    TF_RETURN_IF_ERROR(
        OptimizeNode(node_to_simplify, &nodes_to_simplify, &nodes_to_delete));
  }
  if (fetch_nodes_known_) {
    VLOG(1) << "Deleted " << nodes_to_delete.size() << " out of "
            << graph_view_->graph()->node_size() << " nodes.";
    TF_RETURN_IF_ERROR(graph_view_->DeleteNodes(nodes_to_delete));
  }
  return Status::OK();
}
// Removes redundant control edges: a direct control edge source -> target is
// dropped whenever an alternate path of length > 1 from source to target
// already exists. Implemented as a longest-path-in-DAG computation for every
// source node that has control outputs.
Status DependencyOptimizer::TransitiveReduction() {
  // PRECONDITION: optimized_graph_ must be sorted topologically.
  GraphDef* graph = graph_view_->graph();
  auto node_to_idx = BuildNodeToIdx(*graph);
  const int num_nodes = graph->node_size();
  // Set up a compressed version of the graph to save a constant factor in the
  // expensive algorithm below. Also cache the set of control outputs and the
  // highest index of a target of any control output from each node.
  int num_controls = 0;
  std::vector<gtl::InlinedVector<int, 4>> inputs(num_nodes);
  std::vector<gtl::InlinedVector<int, 2>> control_outputs(num_nodes);
  for (int node_idx = 0; node_idx < num_nodes; ++node_idx) {
    const NodeDef& node = graph->node(node_idx);
    if (ModifiesFrameInfo(node) || !HasOpDef(node)) {
      // Ignore function nodes and nodes that modify frame info.
      continue;
    }
    for (const string& input : node.input()) {
      const NodeDef* input_node = graph_view_->GetNode(NodeName(input));
      if (ModifiesFrameInfo(*input_node) || IsMerge(*input_node)) {
        // Ignore edges from nodes that modify frame info and from Merge nodes,
        // because we cannot know which of its input paths executes.
        continue;
      }
      const int input_node_idx = node_to_idx[input_node];
      inputs[node_idx].push_back(input_node_idx);
      if (IsControlInput(input)) {
        ++num_controls;
        control_outputs[input_node_idx].emplace_back(node_idx);
      }
    }
  }
  // Run the longest path in DAG algorithm for each source node that has control
  // outputs. If, for any target node of a control output, there exists a path
  // of length > 1, we can drop that control dependency.
  int num_controls_removed = 0;
  std::vector<int> longest_distance(num_nodes);
  // Map from target_index -> set of source indices whose direct control edge
  // to the target is redundant and should be removed.
  // NOTE(review): the typedef below is not referenced in this function and the
  // map stores only source indices (no input slots) — confirm whether it is a
  // leftover from an earlier representation.
  typedef std::pair<int, int> InputSlotAndSource;
  absl::flat_hash_map<int, absl::flat_hash_set<int>> control_edges_to_remove;
  for (int source = 0; source < num_nodes; ++source) {
    // No target past highest_control_target can be affected by this source,
    // so the inner sweep is bounded by it.
    int highest_control_target = -1;
    for (const auto& control_output : control_outputs[source]) {
      if (control_output > highest_control_target) {
        highest_control_target = control_output;
      }
    }
    if (highest_control_target <= source) {
      continue;
    }
    std::fill(longest_distance.begin() + source,
              longest_distance.begin() + highest_control_target + 1, 0);
    for (int target = source + 1; target <= highest_control_target; ++target) {
      for (int input : inputs[target]) {
        // If the input node is before source in the topo order, no path
        // source -> input -> target can exist and we can skip it.
        // Also only extend a path from the source itself or from nodes that
        // have a path from source, indicated by longest_distance[input] > 0.
        if (input == source ||
            (input > source && longest_distance[input] > 0)) {
          // If source -> input -> target is longer than the longest
          // path so far from source -> target, update the longest_distance.
          int candidate_longest_distance = longest_distance[input] + 1;
          if (candidate_longest_distance > longest_distance[target]) {
            longest_distance[target] = candidate_longest_distance;
          }
        }
      }
    }
    // If the longest path from source to target of a control dependency is
    // longer than 1, there exists an alternate path, and we can eliminate the
    // redundant direct control dependency.
    for (const auto& control_output : control_outputs[source]) {
      const int target = control_output;
      if (longest_distance[target] > 1) {
        control_edges_to_remove[target].emplace(source);
      }
    }
  }
  // Apply the removals collected above.
  for (const auto& it : control_edges_to_remove) {
    const int target = it.first;
    const NodeDef& target_node = graph->node(target);
    const string target_node_name = target_node.name();
    for (const int& source : it.second) {
      const NodeDef& source_node = graph->node(source);
      TF_RETURN_IF_ERROR(graph_view_->RemoveControllingFanin(
          target_node_name, source_node.name()));
      ++num_controls_removed;
    }
  }
  VLOG(1) << "Removed " << num_controls_removed << " out of " << num_controls
          << " control dependencies";
  return Status::OK();
}
// Suppose there are cross-device control inputs to node C from multiple nodes
// that are located on another device, e.g., we have control edges:
// A->C, B->C
// where A and B are on device X and C is on device Y.
// We can reduce cross-device communication by introducing an intermediate
// NoOp node C' on device X and rewriting the control edges to:
// A->C', B->C', C'->C
// Implements the grouping described in the comment above: for each node with
// two or more cross-device control inputs coming from the same remote device,
// introduce one NoOp on that device and route the control edges through it,
// reducing cross-device control edges to a single one per remote device.
Status DependencyOptimizer::GroupCrossDeviceControlEdges() {
  const int num_nodes = graph_view_->graph()->node_size();
  for (int i = 0; i < num_nodes; ++i) {
    NodeDef* node = graph_view_->graph()->mutable_node(i);
    // Without a device assignment we cannot tell which edges cross devices.
    if (node->device().empty()) continue;
    // Creates new noop nodes for devices on which multiple control inputs are
    // located.
    // Map keyed by device name to the newly introduced Noop node for that
    // device. A nullptr value means that we have only seen a single node on
    // that device.
    std::map<string, NodeDef*> noops;
    int num_noops = 0;
    auto controlling_fanins = graph_view_->GetFanin(
        MutableGraphView::InputPort(node, Graph::kControlSlot));
    for (const auto& controlling_fanin : controlling_fanins) {
      const NodeDef* fanin_node = controlling_fanin.node;
      if (!fanin_node->device().empty() &&
          fanin_node->device() != node->device()) {
        auto emplace_result = noops.emplace(fanin_node->device(), nullptr);
        if (!emplace_result.second && emplace_result.first->second == nullptr) {
          // This is the second cross-device control input from the same
          // device. Creates an intermediate noop node on that device.
          string group_name;
          NodeDef* noop;
          // Creates a fresh node name; there may be conflicting names from
          // a previous iteration of the optimizer.
          do {
            group_name = AddPrefixToNodeName(
                node->name(),
                strings::StrCat("GroupCrossDeviceControlEdges_", num_noops));
            noop = graph_view_->GetNode(group_name);
            ++num_noops;
          } while (noop != nullptr);
          // The grouping NoOp lives on the *remote* device so that only one
          // control edge needs to cross the device boundary.
          NodeDef new_node;
          new_node.set_name(group_name);
          new_node.set_device(fanin_node->device());
          new_node.set_op("NoOp");
          emplace_result.first->second =
              graph_view_->AddNode(std::move(new_node));
        }
      }
    }
    // Reroute existing control edges to go via the newly introduced NoOp nodes.
    for (const auto& controlling_fanin : controlling_fanins) {
      auto it = noops.find(controlling_fanin.node->device());
      if (it != noops.end() && it->second != nullptr) {
        TF_RETURN_IF_ERROR(graph_view_->RemoveControllingFanin(
            node->name(), controlling_fanin.node->name()));
        TF_RETURN_IF_ERROR(graph_view_->AddControllingFanin(
            it->second->name(),
            {controlling_fanin.node->name(), Graph::kControlSlot}));
      }
    }
    // Finally make `node` depend on each grouping NoOp that was created.
    for (const auto& entry : noops) {
      if (entry.second) {
        TF_RETURN_IF_ERROR(graph_view_->AddControllingFanin(
            node->name(), {entry.second->name(), Graph::kControlSlot}));
      }
    }
  }
  return Status::OK();
}
// Grappler entry point: copies `item.graph` into `optimized_graph` and runs
// two rounds of (topological sort -> transitive reduction -> dependency
// simplification -> cross-device control-edge grouping) over it.
Status DependencyOptimizer::Optimize(Cluster* cluster, const GrapplerItem& item,
                                     GraphDef* optimized_graph) {
  *optimized_graph = item.graph;
  nodes_to_preserve_ = item.NodesToPreserve();
  fetch_nodes_known_ = !item.fetch.empty();
  graph_view_.reset(new MutableGraphView(optimized_graph));
  const int num_iterations = 2;
  for (int iteration = 0; iteration < num_iterations; ++iteration) {
    GRAPPLER_RETURN_IF_DEADLINE_EXCEEDED();
    // Transitive reduction requires a topologically sorted graph; if sorting
    // fails we skip that pass but continue with the remaining ones.
    const Status topo_sort_status = TopologicalSort(optimized_graph);
    if (!topo_sort_status.ok()) {
      LOG(ERROR) << "Iteration = " << iteration
                 << ", topological sort failed with message: "
                 << topo_sort_status.error_message();
    } else {
      // Remove redundant control dependencies.
      TF_RETURN_IF_ERROR(TransitiveReduction());
    }
    // Turn nodes with only control outputs into NoOps, prune NoOp and Identity
    // nodes.
    TF_RETURN_IF_ERROR(OptimizeDependencies());
    TF_RETURN_IF_ERROR(GroupCrossDeviceControlEdges());
  }
  return Status::OK();
}
// Grappler feedback hook; DependencyOptimizer does not adapt based on
// measured results, so this is intentionally empty.
void DependencyOptimizer::Feedback(Cluster* /*cluster*/,
                                   const GrapplerItem& /*item*/,
                                   const GraphDef& /*optimized_graph*/,
                                   double /*result*/) {
  // Nothing to do for DependencyOptimizer.
}
} // end namespace grappler
} // end namespace tensorflow
| 40.829231
| 80
| 0.646181
|
86074f4161f67725a2044efe8a52773c010aa181
| 4,108
|
swift
|
Swift
|
SwiftYoutube/Controller/HomeController.swift
|
MananPatel95/SwiftYoutube
|
8dea223e42f557fe5f3e2f1ca8ddd51178b84db7
|
[
"MIT"
] | null | null | null |
SwiftYoutube/Controller/HomeController.swift
|
MananPatel95/SwiftYoutube
|
8dea223e42f557fe5f3e2f1ca8ddd51178b84db7
|
[
"MIT"
] | null | null | null |
SwiftYoutube/Controller/HomeController.swift
|
MananPatel95/SwiftYoutube
|
8dea223e42f557fe5f3e2f1ca8ddd51178b84db7
|
[
"MIT"
] | null | null | null |
//
// ViewController.swift
// SwiftYoutube
//
// Created by Manan Patel on 2018-02-18.
// Copyright © 2018 Manan Patel. All rights reserved.
//
import UIKit
/// Collection-view screen showing a hard-coded list of videos beneath a
/// custom menu bar, styled to resemble the YouTube home feed.
class HomeController: UICollectionViewController, UICollectionViewDelegateFlowLayout {

    /// Custom menu bar pinned below the navigation bar.
    let menuBar: MenuBar = {
        let mb = MenuBar()
        return mb
    }()

    /// Static demo data: two videos attached to a single channel.
    var videos: [Video] = {
        var kanyeChannel = Channel()
        kanyeChannel.name = "Kanye's Corner"
        kanyeChannel.profileImageName = "kanye_profile"
        var blankSpaceVideo = Video()
        blankSpaceVideo.title = "Taylor Swift - Blank Space"
        blankSpaceVideo.thumbnailImageName = "taylor_swift_blank_space"
        blankSpaceVideo.channel = kanyeChannel
        blankSpaceVideo.viewCount = 2453513569
        var badBloodVideo = Video()
        badBloodVideo.title = "Taylor Swift - Bad Blood featuring Kendrick Lamar"
        badBloodVideo.thumbnailImageName = "taylor_swift_bad_blood"
        badBloodVideo.channel = kanyeChannel
        badBloodVideo.viewCount = 1243562368
        return [blankSpaceVideo, badBloodVideo]
    }()

    override func viewDidLoad() {
        super.viewDidLoad()
        title = "Home"
        // Flatten the navigation bar: opaque, no background image, no shadow.
        navigationController?.navigationBar.isTranslucent = false
        navigationController?.navigationBar.setBackgroundImage(UIImage(), for: .default)
        navigationController?.navigationBar.shadowImage = UIImage()
        // Left-aligned custom title label instead of the default centered title.
        let titleLabel = UILabel(frame: CGRect(x: 0, y: 0, width: view.frame.width - 32, height: view.frame.height))
        titleLabel.text = "Home"
        titleLabel.textColor = .white
        titleLabel.font = UIFont.systemFont(ofSize: 20, weight: .medium)
        navigationItem.titleView = titleLabel
        collectionView?.backgroundColor = .white
        collectionView?.register(VideoCell.self, forCellWithReuseIdentifier: "cellID")
        // Inset content by 50pt so cells start below the 50pt-tall menu bar.
        collectionView?.contentInset = .init(top: 50, left: 0, bottom: 0, right: 0)
        collectionView?.scrollIndicatorInsets = .init(top: 50, left: 0, bottom: 0, right: 0)
        setupMenuBar()
        setupNavBarButtons()
    }

    /// Pins the menu bar to the top safe area with a fixed 50pt height.
    private func setupMenuBar() {
        view.addSubview(menuBar)
        menuBar.anchor(top: view.safeTopAnchor, leading: view.safeLeadingAnchor, bottom: nil, trailing: view.safeTrailingAnchor, padding: .init(top: 0, left: 0, bottom: 5, right: 0), size: .init(width: 0, height: 50))
    }

    /// Adds the "more" and "search" buttons to the right of the nav bar.
    func setupNavBarButtons() {
        let moreinfoButton = UIBarButtonItem(image: #imageLiteral(resourceName: "nav_more_icon").withRenderingMode(.alwaysOriginal), style: .plain, target: self, action: #selector(handleMoreInfo))
        let searchButton = UIBarButtonItem(image: #imageLiteral(resourceName: "search_icon").withRenderingMode(.alwaysOriginal), style: .plain, target: self, action: #selector(handleSearch))
        navigationItem.rightBarButtonItems = [moreinfoButton, searchButton]
    }

    // Placeholder action; currently only logs.
    @objc func handleSearch() {
        print("123")
    }

    // Placeholder action; not yet implemented.
    @objc func handleMoreInfo() {
    }

    override func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
        return videos.count
    }

    override func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
        let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "cellID", for: indexPath) as! VideoCell
        cell.video = videos[indexPath.row]
        return cell
    }

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, sizeForItemAt indexPath: IndexPath) -> CGSize {
        // Full-width cell: 16:9 thumbnail (minus 5pt side margins, presumably)
        // plus 60pt for the title/subtitle area — TODO confirm against VideoCell.
        let height = ((view.frame.width - 5 - 5) * 9/16) + 60
        return CGSize(width: view.frame.width, height: height)
    }

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, minimumLineSpacingForSectionAt section: Int) -> CGFloat {
        return 0
    }
}
| 38.392523
| 217
| 0.668939
|
5f8db1793fd67184592bacd6ffb644e967f0513b
| 484
|
rb
|
Ruby
|
lib/voicebase/helpers.rb
|
usertesting/voicebase-client-ruby
|
376e77bd7c9fb0539f7acbf4b1f832335984b2f4
|
[
"MIT"
] | 2
|
2016-01-14T17:47:58.000Z
|
2017-03-10T17:38:03.000Z
|
lib/voicebase/helpers.rb
|
usertesting/voicebase-client-ruby
|
376e77bd7c9fb0539f7acbf4b1f832335984b2f4
|
[
"MIT"
] | 10
|
2016-04-14T22:54:44.000Z
|
2018-08-08T17:27:02.000Z
|
lib/voicebase/helpers.rb
|
usertesting/voicebase-client-ruby
|
376e77bd7c9fb0539f7acbf4b1f832335984b2f4
|
[
"MIT"
] | 6
|
2016-01-14T17:48:01.000Z
|
2017-10-06T16:12:18.000Z
|
module VoiceBase
  # Mixin exposing +camelize_name+ at both the class and instance level of
  # any class that includes it.
  module Helpers
    def self.included(base)
      base.send(:extend, ClassMethods)
      base.send(:include, InstanceMethods)
    end

    module ClassMethods
      # Converts a snake_cased name to lowerCamelCase,
      # e.g. "request_status" -> "requestStatus".
      def camelize_name(snake_cased_name)
        snake_cased_name.to_s.camelize(:lower)
      end
    end

    module InstanceMethods
      # Instance-level convenience that delegates to the class-level method.
      def camelize_name(snake_cased_name)
        self.class.camelize_name(snake_cased_name)
      end
    end
  end
end
| 18.615385
| 50
| 0.67562
|
ef57b154aaec412df2a92d03824a6cbc6e6fc508
| 171
|
js
|
JavaScript
|
results/4_extract-code/code/gulp-if/gulp-if_44.js
|
proglang/dts-generate-results
|
937a89a919814372cae8ea424d88edd8f7eaf87f
|
[
"MIT"
] | 2
|
2021-04-21T16:38:38.000Z
|
2021-11-17T15:20:30.000Z
|
results/4_extract-code/code/gulp-if/gulp-if_44.js
|
proglang/dts-generate-results
|
937a89a919814372cae8ea424d88edd8f7eaf87f
|
[
"MIT"
] | null | null | null |
results/4_extract-code/code/gulp-if/gulp-if_44.js
|
proglang/dts-generate-results
|
937a89a919814372cae8ea424d88edd8f7eaf87f
|
[
"MIT"
] | 1
|
2019-10-31T18:45:26.000Z
|
2019-10-31T18:45:26.000Z
|
// Gulp task that minifies every top-level .js file (excluding node_modules)
// into ./dist/.
var gulp = require('gulp'); // fix: `gulp` was referenced but never required
var uglify = require('gulp-uglify');
gulp.task('task', function() {
  // Return the stream so gulp can track when the task has finished.
  return gulp.src(['./*.js', '!./node_modules/**'])
    .pipe(uglify())
    .pipe(gulp.dest('./dist/'));
});
| 21.375
| 44
| 0.54386
|
c378379c8173b51aafa4a7be1cdb0c5c51f8e1a3
| 565
|
cs
|
C#
|
src/DesertOctopus/Utilities/MethodInfoHelpers/TimeSpanMIH.cs
|
nowol/DesertOctopus
|
9d32b55056e457ae682131dae0cc32a6bf944f7f
|
[
"MIT"
] | 1
|
2017-12-17T00:44:38.000Z
|
2017-12-17T00:44:38.000Z
|
src/DesertOctopus/Utilities/MethodInfoHelpers/TimeSpanMIH.cs
|
nowol/DesertOctopus
|
9d32b55056e457ae682131dae0cc32a6bf944f7f
|
[
"MIT"
] | 14
|
2016-05-01T11:24:44.000Z
|
2020-06-16T23:55:24.000Z
|
src/DesertOctopus/Utilities/MethodInfoHelpers/TimeSpanMIH.cs
|
nowol/DesertOctopus
|
9d32b55056e457ae682131dae0cc32a6bf944f7f
|
[
"MIT"
] | null | null | null |
using System;
using System.Linq;
using System.Reflection;
namespace DesertOctopus.Utilities
{
    /// <summary>
    /// Helper class for TimeSpan MethodInfo
    /// </summary>
    internal static class TimeSpanMih
    {
        /// <summary>
        /// Calls TimeSpan.FromTicks
        /// </summary>
        /// <returns>The method info for TimeSpan.FromTicks</returns>
        public static MethodInfo FromTicks()
        {
            // TimeSpan.FromTicks is a public static method, so restrict the
            // reflection lookup accordingly.
            const BindingFlags lookupFlags = BindingFlags.Static | BindingFlags.Public;
            return typeof(TimeSpan).GetMethod(nameof(TimeSpan.FromTicks), lookupFlags);
        }
    }
}
| 25.681818
| 117
| 0.630088
|
d2a6b157be4d1cf193961bf4380d809fa5dc1cba
| 1,090
|
rs
|
Rust
|
eosio-rust/crates/eosio_rpc/src/error.rs
|
7db9a/rust-eos-dev-env-starter
|
d0f7f40f24f83f840888b4386f287cd39eff1f1b
|
[
"MIT"
] | 46
|
2019-05-29T18:34:27.000Z
|
2021-09-23T05:43:10.000Z
|
eosio-rust/crates/eosio_rpc/src/error.rs
|
7db9a/rust-eos-dev-env-starter
|
d0f7f40f24f83f840888b4386f287cd39eff1f1b
|
[
"MIT"
] | 16
|
2019-06-09T05:57:15.000Z
|
2020-08-27T00:29:19.000Z
|
eosio-rust/crates/eosio_rpc/src/error.rs
|
7db9a/rust-eos-dev-env-starter
|
d0f7f40f24f83f840888b4386f287cd39eff1f1b
|
[
"MIT"
] | 18
|
2019-06-25T11:27:26.000Z
|
2021-01-06T22:33:37.000Z
|
use serde::{Deserialize, Serialize};
/// Errors that can occur while building an RPC request or handling its
/// response.
#[derive(Debug)]
pub enum Error {
    /// The request body failed to serialize to JSON.
    BadRequestJson(::serde_json::Error),
    BadRequest,
    /// No window object was available — presumably a browser/WASM concern;
    /// TODO confirm against the call sites.
    NoWindow,
    BadResponse,
    /// The response body failed to deserialize from JSON.
    BadResponseJson(::serde_json::Error),
    /// The node returned a structured EOS error payload.
    EosError(ErrorResponse),
}
#[cfg(feature = "use-hyper")]
impl From<hyper::Error> for Error {
    // NOTE(review): the transport error is printed to stdout and then
    // discarded as a generic BadResponse; consider propagating it instead.
    fn from(err: hyper::Error) -> Self {
        println!("HYPER ERROR: {:#?}", err);
        Error::BadResponse
    }
}
impl From<serde_json::Error> for Error {
    // NOTE(review): this maps to BadResponse rather than wrapping `err` in
    // the existing BadResponseJson variant — confirm whether that loss of
    // detail is intentional.
    fn from(err: serde_json::Error) -> Self {
        println!("SERDE ERROR: {:#?}", err);
        Error::BadResponse
    }
}
/// Top-level error payload returned by an EOS node.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ErrorResponse {
    pub code: u16,
    pub message: String,
    pub error: ErrorMessage,
}
/// Nested error object inside an [`ErrorResponse`].
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ErrorMessage {
    pub code: u16,
    pub name: String,
    pub what: String,
    pub details: Vec<ErrorDetails>,
}
/// Individual detail entry pointing at the originating source location.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ErrorDetails {
    pub message: String,
    pub file: String,
    pub line_number: u32,
    pub method: String,
}
| 21.8
| 47
| 0.640367
|
b0106280867ea69e20fdcb60880b704341cadc13
| 495
|
py
|
Python
|
js_rss_feed/cms_apps.py
|
compoundpartners/js-rss-feed
|
5dc5184e907008de73b1244b6d5e6443d775f4c0
|
[
"BSD-3-Clause"
] | null | null | null |
js_rss_feed/cms_apps.py
|
compoundpartners/js-rss-feed
|
5dc5184e907008de73b1244b6d5e6443d775f4c0
|
[
"BSD-3-Clause"
] | null | null | null |
js_rss_feed/cms_apps.py
|
compoundpartners/js-rss-feed
|
5dc5184e907008de73b1244b6d5e6443d775f4c0
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from aldryn_apphooks_config.app_base import CMSConfigApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
from .cms_appconfig import RSSFeedConfig
class RSSFeedApp(CMSConfigApp):
    """django CMS apphook that mounts the RSS Feed application."""

    # Human-readable name shown when attaching the apphook in the CMS admin.
    name = _('RSS Feed')
    app_name = 'js_rss_feed'
    # Per-instance configuration model backing this apphook.
    app_config = RSSFeedConfig

    def get_urls(self, *args, **kwargs):
        """Return the dotted paths of the URLconf modules this apphook serves."""
        return ['js_rss_feed.urls']


apphook_pool.register(RSSFeedApp)
| 23.571429
| 56
| 0.759596
|
4649c9d53d6f7e30a5ca7a2a018b001763c2ae4f
| 2,009
|
php
|
PHP
|
resources/views/v_addUnit.blade.php
|
kirchhoff19/PA2
|
a54372fe4bc0b53b721f4b8339815e5918efa079
|
[
"MIT"
] | null | null | null |
resources/views/v_addUnit.blade.php
|
kirchhoff19/PA2
|
a54372fe4bc0b53b721f4b8339815e5918efa079
|
[
"MIT"
] | null | null | null |
resources/views/v_addUnit.blade.php
|
kirchhoff19/PA2
|
a54372fe4bc0b53b721f4b8339815e5918efa079
|
[
"MIT"
] | null | null | null |
{{-- Admin form page for creating a new Unit; posts to /admin/insertUnit. --}}
@extends('layout.v_template')
@section('title', 'Tambah Unit')
@section('content')
<!-- Horizontal Form -->
<div class="card card-info">
<div class="card-header">
<h3 class="card-title">Tambah Unit</h3>
</div>
<!-- /.card-header -->
<!-- form start -->
{{-- multipart/form-data is set even though no file input is present — TODO confirm. --}}
<form class="form-horizontal" id="quickForm" action="/admin/insertUnit" method="POST" enctype="multipart/form-data">
@csrf
<div class="card-body">
{{-- Unit code field with inline validation feedback from the `kode_unit` error bag. --}}
<div class="form-group row">
<label for="kode_unit" class="col-sm-2 col-form-label">Kode Unit</label>
<div class="col-sm-10">
<input type="text" class="form-control @error('kode_unit') is-invalid @enderror" id="kode_unit" name="kode_unit">
<div class="invalid-feedback">
@error('kode_unit')
<?= $message ?>
@enderror
</div>
</div>
</div>
{{-- Unit name field with inline validation feedback from the `nama_unit` error bag. --}}
<div class="form-group row">
<label for="nama_unit" class="col-sm-2 col-form-label">Nama Unit</label>
<div class="col-sm-10">
<input type="text" class="form-control @error('nama_unit') is-invalid @enderror" id="nama_unit" name="nama_unit">
<div class="invalid-feedback">
@error('nama_unit')
<?= $message ?>
@enderror
</div>
</div>
</div>
</div>
<!-- /.card-body -->
<div class="card-footer">
<button type="submit" class="btn btn-info">Tambah</button>
</div>
<!-- /.card-footer -->
</form>
</div>
<!-- /.card -->
@endsection
| 41
| 135
| 0.419114
|
1f7f8cbdf23ae53b5628baa3586aa33f3f0af1f8
| 1,044
|
swift
|
Swift
|
ChatbotNewFrameworkIOS/Models/Config.swift
|
25290Ajeet/ChatbotNewFrameworkIOS
|
ad352ad26ca2be2d3b71a87fe874d218692ca50a
|
[
"MIT"
] | 1
|
2019-12-31T13:27:05.000Z
|
2019-12-31T13:27:05.000Z
|
ChatbotNewFrameworkIOS/Models/Config.swift
|
25290Ajeet/ChatbotNewFrameworkIOS
|
ad352ad26ca2be2d3b71a87fe874d218692ca50a
|
[
"MIT"
] | null | null | null |
ChatbotNewFrameworkIOS/Models/Config.swift
|
25290Ajeet/ChatbotNewFrameworkIOS
|
ad352ad26ca2be2d3b71a87fe874d218692ca50a
|
[
"MIT"
] | null | null | null |
//
// Config.swift
// XavBotFramework
//
// Created by Ajeet Sharma on 30/10/19.
// Copyright © 2019 Ajeet Sharma. All rights reserved.
//
import Foundation
/// Top-level wrapper around the remote bot-configuration response.
struct ConfigData : Codable {
    let result:Config?
    let status:String?
}
/// Bot configuration payload (welcome message, avatar, theme, XMPP info, ...).
struct Config : Codable {
    let uid:String?
    let wlcm_msg:String?
    let life_span:String?
    let avatar:String?
    let theme_colour:String?
    let vhost:String?
    let jid:String?
    let integration:[Integration]?
}
/// Single integration entry carrying its visual settings.
struct Integration : Codable {
    let settings:Setting?
}
/// UI color settings with hard-coded default hex values.
/// NOTE(review): these are `let` properties with initial values — Swift's
/// synthesized Codable will not overwrite them with decoded JSON values.
/// If values from the server are expected here, they should be `var`
/// (or use custom CodingKeys/init) — confirm intent.
struct Setting : Codable {
    let button_colour:String = "#4B286D"
    let carousel_color:String = "#e8e8eb"
    let carousel_textcolour:String = "#000000"
    let response_bubble:String = "#e8e8eb"
    let response_text_icon:String = "#000000"
    let sender_bubble:String = "#4B286D"
    let sender_text_icon:String = "#ffffff"
    let widget_textcolour:String = "#54595f"
    let feedback_color:String = "#4B286D"
    let theme_colour:String = "#ffffff"
    let button_hover_colour:String = "#ffffff"
}
| 22.695652
| 55
| 0.683908
|
3faf1384376c22399226fb5e60574852ba2db1ab
| 530
|
rb
|
Ruby
|
lib/active_support_decorators/dependencies_patch.rb
|
Betterment/activesupport-decorators
|
53892eab981b9e57c91abb48b26aa7234c97bdec
|
[
"MIT"
] | null | null | null |
lib/active_support_decorators/dependencies_patch.rb
|
Betterment/activesupport-decorators
|
53892eab981b9e57c91abb48b26aa7234c97bdec
|
[
"MIT"
] | null | null | null |
lib/active_support_decorators/dependencies_patch.rb
|
Betterment/activesupport-decorators
|
53892eab981b9e57c91abb48b26aa7234c97bdec
|
[
"MIT"
] | null | null | null |
require 'active_support/dependencies'
module ActiveSupport::Dependencies
alias_method :require_or_load_without_multiple, :require_or_load
def require_or_load(file_name, const_path = nil)
order = ActiveSupportDecorators.load_path_order(file_name)
if ActiveSupportDecorators.debug && order.size > 1
Rails.try(:logger).try(:debug, "ActiveSupportDecorators: Loading files in order #{order.join(', ')}.")
end
order.each do |path|
require_or_load_without_multiple(path, const_path)
end
end
end
| 29.444444
| 108
| 0.756604
|
435f6952ea1db2ab1b181d509d232f8823597b82
| 1,663
|
ts
|
TypeScript
|
src/ts/controller/listeners/dragListener.ts
|
cmakler/kgjs
|
a29d194cfbfe3dcb0407b5281a34dd0ddd42bf68
|
[
"MIT"
] | 20
|
2019-03-12T12:54:04.000Z
|
2022-01-27T01:24:07.000Z
|
src/ts/controller/listeners/dragListener.ts
|
cmakler/kgjs
|
a29d194cfbfe3dcb0407b5281a34dd0ddd42bf68
|
[
"MIT"
] | 5
|
2017-07-20T17:16:17.000Z
|
2022-02-26T04:07:44.000Z
|
src/ts/controller/listeners/dragListener.ts
|
cmakler/kgjs
|
a29d194cfbfe3dcb0407b5281a34dd0ddd42bf68
|
[
"MIT"
] | 3
|
2019-10-10T03:39:14.000Z
|
2021-12-13T00:45:46.000Z
|
/// <reference path="../../kg.ts" />
module KG {
export interface DragListenerDefinition extends ListenerDefinition {
draggable?: string;
directions?: string;
vertical?: string;
horizontal?: string;
}
export interface IDragListener extends IListener {
draggable: boolean;
directions: "" | "x" | "y" | "xy";
}
/*
A DragListener is a special kind of Listener that listens for drag events.
In addition to a param and an expression, it has properties for whether it is draggable
and, if so, in which directions it is draggable.
*/
export class DragListener extends Listener implements IDragListener {
public directions;
public draggable;
constructor(def: DragListenerDefinition) {
if(def.hasOwnProperty('vertical')) {
def.directions = 'y';
def.param = def.vertical;
def.expression = `params.${def.vertical} + drag.dy`
}
if(def.hasOwnProperty('horizontal')) {
def.directions = 'x';
def.param = def.horizontal;
def.expression = `params.${def.horizontal} + drag.dx`
}
setDefaults(def, {
directions: "xy"
});
setProperties(def, 'updatables',['draggable', 'directions']);
super(def);
}
update(force) {
let dl = super.update(force);
if(!dl.def.hasOwnProperty('draggable')) {
dl.draggable = (dl.directions.length > 0);
}
return dl;
}
}
}
| 28.672414
| 95
| 0.538785
|
0d895c09f3cd16416140d78948856f4374d2ea5e
| 207
|
h
|
C
|
src/essence.game/packets/lobby/outgoing/account/SendReferralRegistrationSuccess.h
|
hinnie123/qpang-essence-emulator-1
|
2b99f21bcbcbdcd5ff8104d4845ebc10ec0e6e1b
|
[
"MIT"
] | 1
|
2021-11-23T00:31:46.000Z
|
2021-11-23T00:31:46.000Z
|
src/essence.game/packets/lobby/outgoing/account/SendReferralRegistrationSuccess.h
|
hinnie123/qpang-essence-emulator-1
|
2b99f21bcbcbdcd5ff8104d4845ebc10ec0e6e1b
|
[
"MIT"
] | null | null | null |
src/essence.game/packets/lobby/outgoing/account/SendReferralRegistrationSuccess.h
|
hinnie123/qpang-essence-emulator-1
|
2b99f21bcbcbdcd5ff8104d4845ebc10ec0e6e1b
|
[
"MIT"
] | 1
|
2021-12-18T12:50:46.000Z
|
2021-12-18T12:50:46.000Z
|
#pragma once
#include "packets/LobbyServerPacket.h"
class SendReferralRegistrationSuccess : public LobbyServerPacket
{
public:
explicit SendReferralRegistrationSuccess() : LobbyServerPacket(677)
{
}
};
| 17.25
| 68
| 0.801932
|
2c5c5e2783363b2077e9eee27879eb255213189a
| 10,079
|
py
|
Python
|
topic6_engineering_toolkits/Topic21_PythonforOptimization/LinearProgramming.py
|
sunluelectric/tutorialPython
|
2260282b979a16e3b417a748d0d70231433d5fd3
|
[
"MIT"
] | null | null | null |
topic6_engineering_toolkits/Topic21_PythonforOptimization/LinearProgramming.py
|
sunluelectric/tutorialPython
|
2260282b979a16e3b417a748d0d70231433d5fd3
|
[
"MIT"
] | null | null | null |
topic6_engineering_toolkits/Topic21_PythonforOptimization/LinearProgramming.py
|
sunluelectric/tutorialPython
|
2260282b979a16e3b417a748d0d70231433d5fd3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Class: LinearProgramming
"""
import numpy as np
import gurobipy as gp
class LinearProgramming():
"""
LinearProgramming defines and solves a LP problem.
"""
def __init__(self, *args):
"""
__init__ creates a LP problem.
"""
try:
problem_configuration = self._listtodict(list(args))
except SyntaxError:
print("Syntax error in the configuration of the LP problem.\n")
if ("cmax" in problem_configuration.keys())\
and ("cmin" not in problem_configuration.keys()):
self.minimize_problem = False
self.costfunction_matrix = -problem_configuration.get("cmax")
elif ("cmin" in problem_configuration.keys())\
and ("cmax" not in problem_configuration.keys()):
self.minimize_problem = True
self.costfunction_matrix = problem_configuration.get("cmin")
else:
self.costfunction_matrix = None
if "ineq_a" in problem_configuration.keys():
self.inequality_constraint_a = problem_configuration.get("ineq_a")
else:
self.inequality_constraint_a = None
if "ineq_b" in problem_configuration.keys():
self.inequality_constraint_b = problem_configuration.get("ineq_b")
else:
self.inequality_constraint_b = None
if "lb" in problem_configuration.keys():
self.lower_bound = problem_configuration.get("lb")
else:
self.lower_bound = None
if "ub" in problem_configuration.keys():
self.upper_bound = problem_configuration.get("ub")
else:
self.upper_bound = None
self.consistency_flag = False
self.canonical_flag = False
# Variables used in the class
self.canonical_max_kpi = None
self.canonical_inequality_constraint_a = None
self.canonical_inequality_constraint_b = None
self.gurobi_lp_model = None
self.gurobi_lp_variable = None
self.optimum_x = None
self.optimum_y = None
def reset_costfunction_matrix(self, *args):
"""
reset_costfunction_matrix resets the cost function matrix of the LP problem.
"""
try:
problem_configuration = self._listtodict(list(args))
except SyntaxError:
print("Syntax error in the configuration of the LP problem.\n")
if ("cmax" in problem_configuration.keys())\
and ("cmin" not in problem_configuration.keys()):
self.minimize_problem = False
self.costfunction_matrix = -problem_configuration.get("cmax")
elif ("cmin" in problem_configuration.keys())\
and ("cmax" not in problem_configuration.keys()):
self.minimize_problem = True
self.costfunction_matrix = problem_configuration.get("cmin")
else:
self.costfunction_matrix = None
self.minimize_problem = None
self.consistency_flag = False
self.canonical_flag = False
def reset_inequality_constraint_a(self, inequality_constraint_a):
"""
reset_inequality_constraint_a resets the inequality constraint a
"""
self.inequality_constraint_a = inequality_constraint_a
self.consistency_flag = False
self.canonical_flag = False
def reset_inequality_constraint_b(self, inequality_constraint_b):
"""
reset_inequality_constraint_b resets the inequality constraint b
"""
self.inequality_constraint_b = inequality_constraint_b
self.consistency_flag = False
self.canonical_flag = False
def reset_lower_bound(self, lower_bound):
"""
reset_lower_bound resets the lower bound of the LP problem.
"""
self.lower_bound = lower_bound
self.consistency_flag = False
self.canonical_flag = False
def reset_upper_bound(self, upper_bound):
"""
reset_upper_bound resets the upper bound of the LP problem.
"""
self.upper_bound = upper_bound
self.consistency_flag = False
self.canonical_flag = False
@classmethod
def _listtodict(cls, input_lst):
"""
_listtodict converts an input list to a dictionary.
"""
output_op = {input_lst[i]: input_lst[i + 1] for i in range(0, len(input_lst), 2)}
return output_op
def _check_consistency(self):
"""
Check input consistency
"""
consistency_flag = True
# Check cost function and inequality matrix size constraint consistency.
if self.costfunction_matrix is not None:
variable_length = len(self.costfunction_matrix)
if (self.inequality_constraint_a is not None)\
and (self.inequality_constraint_b is not None):
inequality_constraint_length = len(self.inequality_constraint_b)
if self.inequality_constraint_a.shape[1] == variable_length:
pass
else:
print("The number of variables is not consistent\
in the cost function and in the inequality constraint.\n")
consistency_flag = False
if self.inequality_constraint_a.shape[0] == inequality_constraint_length:
pass
else:
print("The number of inequality constraint number is not consistent\
in ineq_a and ineq_b.\n")
consistency_flag = False
else:
print("The inequality constraint is not defined\
for the LP problem.\n")
consistency_flag = False
else:
print("The cost function matrix is not defined for the LP problem.\n")
consistency_flag = False
# Check upper bound and lower bound consistency.
if consistency_flag:
if self.upper_bound is not None:
if len(self.upper_bound) == variable_length:
pass
else:
print("The number of variables is not consistent\
in the cost function and in the upper bound setup\
for the LP problem.\n")
if self.lower_bound is not None:
if len(self.lower_bound) == variable_length:
pass
else:
print("The number of variables is not consistent\
in the cost function and in the lower bound setup\
for the LP problem.\n")
# Set flag
self.consistency_flag = consistency_flag
self.canonical_flag = False
def _form_canonical(self):
"""
_form_canonical forms the LP problem into canonical form.
"""
if self.consistency_flag:
variable_length = len(self.costfunction_matrix)
self.canonical_max_kpi = -self.costfunction_matrix
self.canonical_inequality_constraint_a = self.inequality_constraint_a
self.canonical_inequality_constraint_b = self.inequality_constraint_b
if self.upper_bound is not None:
self.canonical_inequality_constraint_a = np.concatenate(\
(self.canonical_inequality_constraint_a, \
np.identity(variable_length)), axis = 0)
self.canonical_inequality_constraint_b = np.concatenate(\
(self.canonical_inequality_constraint_b, \
self.upper_bound), axis = 0)
if self.lower_bound is not None:
self.canonical_inequality_constraint_a = np.concatenate(\
(self.canonical_inequality_constraint_a, \
-np.identity(variable_length)), axis = 0)
self.canonical_inequality_constraint_b = np.concatenate(\
(self.canonical_inequality_constraint_b, \
self.lower_bound), axis = 0)
self.canonical_flag = True
else:
print("The LP problem has not passed consistency check yet.\n")
def solve_gurobi(self):
"""
solve_gurobi solves the LP problem using gurobipy.model()
"""
self._check_consistency()
self._form_canonical()
if self.consistency_flag and self.canonical_flag:
variable_length = len(self.costfunction_matrix)
self.gurobi_lp_model = gp.Model()
self.gurobi_lp_model.Params.LogToConsole = 0
self.gurobi_lp_variable = self.gurobi_lp_model.addMVar(variable_length)
self.gurobi_lp_model.setMObjective(None, self.canonical_max_kpi, 0.0, \
None, None, self.gurobi_lp_variable, \
gp.GRB.MAXIMIZE)
self.gurobi_lp_model.addConstr(self.canonical_inequality_constraint_a \
@ self.gurobi_lp_variable <= \
self.canonical_inequality_constraint_b.transpose()[0]) # pylint: disable=line-too-long
self.gurobi_lp_model.update()
self.gurobi_lp_model.optimize()
self.optimum_x = self.gurobi_lp_variable.x
self.optimum_y = self.gurobi_lp_model.objVal
else:
print("The LP problem has not passed consistency check\
or formed into a canonical form yet.\n")
def display_result(self):
"""
display_result displays the result of the LP problem
"""
print("The optimum x variables are: \n")
print(self.optimum_x)
print("\n")
if self.minimize_problem:
print("The cost function value is: \n")
print(-self.optimum_y)
elif not self.minimize_problem:
print("The likelihood function value is: \n")
print(self.optimum_y)
print("\n")
| 44.597345
| 133
| 0.599861
|
1f2f62472304db66c62da8414bd60d372da5a6b0
| 3,423
|
kt
|
Kotlin
|
src/main/kotlin/com/github/kerubistan/kerub/services/impl/AbstractAssetService.kt
|
K0zka/kerub
|
99cb43c962da46df7a0beb75f2e0c839c6c50bda
|
[
"Apache-2.0"
] | 16
|
2016-01-03T12:57:25.000Z
|
2020-08-04T10:36:30.000Z
|
src/main/kotlin/com/github/kerubistan/kerub/services/impl/AbstractAssetService.kt
|
kerubistan/kerub
|
99cb43c962da46df7a0beb75f2e0c839c6c50bda
|
[
"Apache-2.0"
] | 205
|
2015-10-27T11:38:26.000Z
|
2021-05-09T11:53:18.000Z
|
src/main/kotlin/com/github/kerubistan/kerub/services/impl/AbstractAssetService.kt
|
K0zka/kerub
|
99cb43c962da46df7a0beb75f2e0c839c6c50bda
|
[
"Apache-2.0"
] | 4
|
2018-01-26T14:07:12.000Z
|
2019-12-05T20:49:30.000Z
|
package com.github.kerubistan.kerub.services.impl
import com.github.kerubistan.kerub.data.AssetDao
import com.github.kerubistan.kerub.model.Asset
import com.github.kerubistan.kerub.model.AssetOwner
import com.github.kerubistan.kerub.model.AssetOwnerType
import com.github.kerubistan.kerub.model.paging.SearchResultPage
import com.github.kerubistan.kerub.model.paging.SortResultPage
import com.github.kerubistan.kerub.security.AssetAccessController
import com.github.kerubistan.kerub.services.AssetService
import java.util.UUID
abstract class AbstractAssetService<T : Asset>(
val accessController: AssetAccessController,
override val dao: AssetDao<T>,
entityType: String
) : ListableBaseService<T>(entityType), AssetService<T> {
override fun listByOwner(
start: Long, limit: Int, sort: String, ownerType: AssetOwnerType, ownerId: UUID
): SortResultPage<T> {
val list = dao.listByOwner(
owner = AssetOwner(ownerId, ownerType),
start = start,
limit = limit,
sort = sort
)
return SortResultPage(
start = start,
count = list.size.toLong(),
result = list,
sortBy = sort,
total = list.size.toLong() //TODO
)
}
override fun search(
field: String, value: String, start: Long, limit: Int, ownerType: AssetOwnerType, ownerId: UUID
): SearchResultPage<T> {
val list = dao.fieldSearch(
setOf(AssetOwner(ownerId, ownerType)),
field,
value
)
return SearchResultPage(
start = start,
count = list.size.toLong(),
result = list,
total = list.size.toLong(), //TODO
searchby = field
)
}
override fun getById(id: UUID): T =
assertExist(entityType, accessController.doAndCheck {
super.getById(id)
}, id)
override fun update(id: UUID, entity: T): T =
assertExist(entityType, accessController.checkAndDo(entity) {
super.update(id, entity)
}, id)
final override fun delete(id: UUID) {
val entity: T = assertExist(entityType, accessController.doAndCheck { dao[id] }, id)
beforeRemove(entity)
doRemove(entity)
afterRemove(entity)
}
open fun doRemove(entity: T) {
dao.remove(entity)
}
open fun afterRemove(entity: T) {
// usually nothing to do after remove, but some virtual resources may need post-action
}
open fun beforeRemove(entity: T) {
//it would be nice to have a generic validation here, like incoming references
}
override fun add(entity: T): T =
accessController.checkAndDo(asset = entity) {
super.add(entity)
} ?: entity
override fun listAll(start: Long, limit: Int, sort: String): SortResultPage<T> =
accessController.listWithFilter(dao, start, limit, sort)
override fun getByName(name: String): List<T> = accessController.filter(dao.getByName(name) as List<T>)
override fun search(field: String, value: String, start: Long, limit: Int): SearchResultPage<T> =
accessController.searchWithFilter(dao, field, value, start, limit)
override fun getByNameAndOwner(ownerType: AssetOwnerType, ownerId: UUID, name: String): List<T> =
TODO("https://github.com/kerubistan/kerub/issues/173")
override fun autoName(): String {
//TODO: this is checking globally, it should only be allowed when accounts are not mandatory
var nr = dao.count() + 1
var name = "$entityType-$nr"
while (dao.existsByName(name)) {
nr++
name = "$entityType-$nr"
}
return name
}
override fun autoName(ownerType: AssetOwnerType, ownerId: UUID): String {
TODO()
}
}
| 29.25641
| 104
| 0.721297
|
b75a160f133e6e3a1393d66c519e20d8633d5515
| 511
|
cs
|
C#
|
5.1. Defining Classes - Exercise/MyHelpLibrary/DateModifier.cs
|
dzhanetGerdzhikova/Advanced
|
2b378fba9e74f381eadcfb37d2691c2013c0bb79
|
[
"MIT"
] | null | null | null |
5.1. Defining Classes - Exercise/MyHelpLibrary/DateModifier.cs
|
dzhanetGerdzhikova/Advanced
|
2b378fba9e74f381eadcfb37d2691c2013c0bb79
|
[
"MIT"
] | null | null | null |
5.1. Defining Classes - Exercise/MyHelpLibrary/DateModifier.cs
|
dzhanetGerdzhikova/Advanced
|
2b378fba9e74f381eadcfb37d2691c2013c0bb79
|
[
"MIT"
] | null | null | null |
using System;
namespace MyHelpLibrary
{
public static class DateModifier
{
public static int DifferenceInDates { get; set; }
public static int DifferenceOfDays(string start, string end)
{
DateTime startDate = DateTime.Parse(start);
DateTime endDate = DateTime.Parse(end);
TimeSpan difference = startDate - endDate;
DifferenceInDates = (int)Math.Abs(difference.TotalDays);
return DifferenceInDates;
}
}
}
| 26.894737
| 68
| 0.626223
|
386ea69a6bd4ceba7949e7355285d397b1b10503
| 940
|
php
|
PHP
|
mod/invitefriends/languages/ja.php
|
usher-sky/elgg
|
fe71c4995ce3994111533a00623808c4ab80cf25
|
[
"MIT"
] | null | null | null |
mod/invitefriends/languages/ja.php
|
usher-sky/elgg
|
fe71c4995ce3994111533a00623808c4ab80cf25
|
[
"MIT"
] | null | null | null |
mod/invitefriends/languages/ja.php
|
usher-sky/elgg
|
fe71c4995ce3994111533a00623808c4ab80cf25
|
[
"MIT"
] | null | null | null |
<?php
return [
'friends:invite' => '友達を招待する',
'invitefriends:registration_disabled' => 'このサイトでは新規ユーザ登録はできないように設定されていまので、新しいユーザを招待することはできません。',
'invitefriends:introduction' => '記入力欄にEメールアドレスと招待のメッセージを入力して、このネットワークに友達を招待しましょう。',
'invitefriends:emails' => 'Eメールアドレス (一行に一件ずつ)',
'invitefriends:message' => 'メッセージ',
'invitefriends:subject' => '【%s】への招待状',
'invitefriends:success' => 'あなたの友達を招待しました。',
'invitefriends:invitations_sent' => '招待状を送りました: %s 。その際、以下の問題が発生しました:',
'invitefriends:email_error' => '招待状を送信しましたが、次のアドレスはどこか間違っている為、送信できませんでした。: %s',
'invitefriends:already_members' => '以下の方は、すでにメンバです。: %s',
'invitefriends:noemails' => '電子メールアドレスが入力されていません。',
'invitefriends:message:default' => 'こんにちは、
あなたをSNSサイト %s へお誘いしようと思っております。
ぜひご参加いただけたらと思います。
',
'invitefriends:email' => 'SNSサイト %1$s への招待状をお届けいたします。
%2$s 様から下記のメッッセージをお預かりしております:
%3$s
ご参加いただける場合は、下のリンクをクリックしてください:
%4$s
アカウントを作成すると自動的に友達として登録されます。',
];
| 24.736842
| 97
| 0.741489
|
92f3248ea6fa5830a27fc6e64626bad84a998877
| 310
|
rb
|
Ruby
|
lib/rrserver.rb
|
raymon1/ruby-rack-server
|
ee89b6905b8028a0827bfcddda28d6744335a436
|
[
"MIT"
] | null | null | null |
lib/rrserver.rb
|
raymon1/ruby-rack-server
|
ee89b6905b8028a0827bfcddda28d6744335a436
|
[
"MIT"
] | null | null | null |
lib/rrserver.rb
|
raymon1/ruby-rack-server
|
ee89b6905b8028a0827bfcddda28d6744335a436
|
[
"MIT"
] | null | null | null |
require "rack"
require "slop"
require "socket"
require "time"
require "uri"
require "rrserver/cli"
require "rrserver/http"
require "rrserver/launcher"
require "rrserver/logger"
require "rrserver/server"
require "rrserver/version"
module Rrserver
def self.logger
@logger ||= Rrserver::Logger.new
end
end
| 15.5
| 34
| 0.76129
|
937b54ef05f9b15222e0ae518e958e526eaf9697
| 8,387
|
cs
|
C#
|
src/BinSkim.Rules/ELFRules/BA3030.UseCheckedFunctionsWithGCC.cs
|
michaelcfanning/binskim
|
e010eeb1f4bc5f355298c44247de5fd384100c39
|
[
"MIT"
] | 1
|
2022-02-27T10:54:19.000Z
|
2022-02-27T10:54:19.000Z
|
src/BinSkim.Rules/ELFRules/BA3030.UseCheckedFunctionsWithGCC.cs
|
michaelcfanning/binskim
|
e010eeb1f4bc5f355298c44247de5fd384100c39
|
[
"MIT"
] | null | null | null |
src/BinSkim.Rules/ELFRules/BA3030.UseCheckedFunctionsWithGCC.cs
|
michaelcfanning/binskim
|
e010eeb1f4bc5f355298c44247de5fd384100c39
|
[
"MIT"
] | null | null | null |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.Generic;
using System.Composition;
using System.Linq;
using ELFSharp.ELF;
using ELFSharp.ELF.Sections;
using Microsoft.CodeAnalysis.BinaryParsers;
using Microsoft.CodeAnalysis.IL.Sdk;
using Microsoft.CodeAnalysis.Sarif;
using Microsoft.CodeAnalysis.Sarif.Driver;
namespace Microsoft.CodeAnalysis.IL.Rules
{
[Export(typeof(Skimmer<BinaryAnalyzerContext>)), Export(typeof(ReportingDescriptor))]
public class UseCheckedFunctionsWithGcc : ELFBinarySkimmerBase
{
// This list comes from listing all of the functions available in glibc (using readelf),
// then filtering to ones with a checked variant (_*_chk).
private static readonly string[] fortifiableFunctionNames = new string[]{
"asprintf",
"confstr",
"dprintf",
"fdelt",
"fgets",
"fgetws",
"fprintf",
"fread",
"fwprintf",
"getcwd",
"getdomainname",
"getgroups",
"gethostname",
"gets",
"getwd",
"longjmp",
"mbsnrtowcs",
"mbsrtowcs",
"mbstowcs",
"memcpy",
"memmove",
"mempcpy",
"memset",
"poll",
"ppoll",
"pread",
"printf",
"read",
"readlink",
"readlinkat",
"realpath",
"recv",
"recvfrom",
"snprintf",
"sprintf",
"stack",
"stpcpy",
"stpncpy",
"strcat",
"strcpy",
"strncat",
"strncpy",
"swprintf",
"syslog",
"vasprintf",
"vdprintf",
"vfprintf",
"vfwprintf",
"vprintf",
"vsnprintf",
"vsprintf",
"vswprintf",
"vsyslog",
"vwprintf",
"wcpcpy",
"wcpncpy",
"wcrtomb",
"wcscat",
"wcscpy",
"wcsncat",
"wcsncpy",
"wcsnrtombs",
"wcsrtombs",
"wcstombs",
"wctomb",
"wmemcpy",
"wmemmove",
"wmempcpy",
"wmemset",
"wprintf"
};
private static readonly HashSet<string> unfortifiedFunctions =
new HashSet<string>(fortifiableFunctionNames);
private static readonly HashSet<string> fortifiedFunctions =
new HashSet<string>(fortifiableFunctionNames.Select(f => "__" + f + "_chk"));
/// <summary>
/// BA3030
/// </summary>
public override string Id => RuleIds.UseCheckedFunctionsWithGcc;
/// <summary>
/// The stack protector ensures that all functions that use buffers over a certain size will
// use a stack cookie(and check it) to prevent stack based buffer overflows, exiting if stack
// smashing is detected.Use '--fstack-protector-strong' (all buffers of 4 bytes or more) or
// '--fstack-protector-all' (all functions) to enable this.
/// </summary>
public override MultiformatMessageString FullDescription => new MultiformatMessageString { Text = RuleResources.BA3030_UseCheckedFunctionsWithGcc_Description };
protected override IEnumerable<string> MessageResourceNames => new string[] {
nameof(RuleResources.BA3030_Pass_AllFunctionsChecked),
nameof(RuleResources.BA3030_Pass_SomeFunctionsChecked),
nameof(RuleResources.BA3030_Pass_NoCheckableFunctions),
nameof(RuleResources.BA3030_Error),
nameof(RuleResources.NotApplicable_InvalidMetadata)
};
public override AnalysisApplicability CanAnalyzeElf(ELFBinary target, Sarif.PropertiesDictionary policy, out string reasonForNotAnalyzing)
{
IELF elf = target.ELF;
if (elf.Type == FileType.Core || elf.Type == FileType.None || elf.Type == FileType.Relocatable)
{
reasonForNotAnalyzing = MetadataConditions.ElfIsCoreNoneOrObject;
return AnalysisApplicability.NotApplicableToSpecifiedTarget;
}
// We check for "any usage of non-gcc" as a default/standard compilation with clang leads to [GCC, Clang]
// either because it links with a gcc-compiled object (cstdlib) or the linker also reading as GCC.
// This has a potential for a False Negative if teams are using GCC and other tools.
if (target.Compilers.Any(c => c.Compiler != ELFCompilerType.GCC))
{
reasonForNotAnalyzing = MetadataConditions.ElfNotBuiltWithGcc;
return AnalysisApplicability.NotApplicableToSpecifiedTarget;
}
reasonForNotAnalyzing = null;
return AnalysisApplicability.ApplicableToSpecifiedTarget;
}
/// <summary>
/// Checks if Fortified functions are used--the -DFORTIFY_SOURCE=2 flag enables these when -O2 is enabled.
///
/// Check implementation:
/// -Get all function symbols in the ELF binary
/// -Check for any fortified functions--if we find any, we used the option.
/// -Check for any unfortified functions. If we only find unfortified functions, one of two things is true:
/// 1) Fortify Source wasn't used; or
/// 2) Fortify Source was used, but gcc/clang was unable to statically find anything that needed to be fortified.
/// We report on both cases.
/// -If no fortifiable functions were used at all, the rule doesn't apply.
/// </summary>
public override void Analyze(BinaryAnalyzerContext context)
{
IELF elf = context.ELFBinary().ELF;
IEnumerable<ISymbolEntry> symbols =
ELFUtility.GetAllSymbols(elf).Where(sym => sym.Type == SymbolType.Function || sym.Type == SymbolType.Object);
var protectedFunctions = new List<ISymbolEntry>();
var unprotectedFunctions = new List<ISymbolEntry>();
foreach (ISymbolEntry e in symbols)
{
if (unfortifiedFunctions.Contains(e.Name))
{
unprotectedFunctions.Add(e);
}
else if (fortifiedFunctions.Contains(e.Name))
{
protectedFunctions.Add(e);
}
}
if (protectedFunctions.Any())
{
if (unprotectedFunctions.Any())
{
context.Logger.Log(this,
RuleUtilities.BuildResult(ResultKind.Pass, context, null,
nameof(RuleResources.BA3030_Pass_SomeFunctionsChecked),
context.TargetUri.GetFileName()));
}
else
{
context.Logger.Log(this,
RuleUtilities.BuildResult(ResultKind.Pass, context, null,
nameof(RuleResources.BA3030_Pass_AllFunctionsChecked),
context.TargetUri.GetFileName()));
}
}
else if (unprotectedFunctions.Any())
{
context.Logger.Log(this,
RuleUtilities.BuildResult(FailureLevel.Error, context, null,
nameof(RuleResources.BA3030_Error),
context.TargetUri.GetFileName()));
}
else
{
context.Logger.Log(this,
RuleUtilities.BuildResult(ResultKind.Pass, context, null,
nameof(RuleResources.BA3030_Pass_NoCheckableFunctions),
context.TargetUri.GetFileName()));
}
}
}
}
| 39.375587
| 169
| 0.538214
|
a43e3fd0cda516003e62c625f459f1fd868fe8ac
| 3,579
|
php
|
PHP
|
app/Http/Controllers/PaymentManagement/PaymentController.php
|
sasori69/backend-ecommerce
|
04bb1395bef6a49a1412a0004b6044df9e740628
|
[
"MIT"
] | null | null | null |
app/Http/Controllers/PaymentManagement/PaymentController.php
|
sasori69/backend-ecommerce
|
04bb1395bef6a49a1412a0004b6044df9e740628
|
[
"MIT"
] | null | null | null |
app/Http/Controllers/PaymentManagement/PaymentController.php
|
sasori69/backend-ecommerce
|
04bb1395bef6a49a1412a0004b6044df9e740628
|
[
"MIT"
] | null | null | null |
<?php
namespace App\Http\Controllers\PaymentManagement;
use Illuminate\Http\Request;
use App\Http\Controllers\Controller;
use App\Payment;
use Yajra\Datatables\Datatables;
class PaymentController extends Controller
{
//Protected module payment-method by slug
public function __construct()
{
$this->middleware('perm.acc:payment');
}
/**
* Display a listing of the resource.
*
* @return \Illuminate\Http\Response
*/
//public index payment-method
public function index()
{
return view('panel.payment-management.payment.index');
}
/**
* Show the form for creating a new resource.
*
* @return \Illuminate\Http\Response
*/
//view form create
public function create()
{
return view('panel.payment-management.payment.form-create');
}
/**
* Store a newly created resource in storage.
*
* @param \Illuminate\Http\Request $request
* @return \Illuminate\Http\Response
*/
//store data carriers
public function store(Request $request)
{
$payment = new PaymentMethod();
$payment->name = $request->name;
$payment->norek = $request->norek;
$payment->save();
return redirect()->route('payment-method.index')->with('toastr', 'new');
}
/**
* Display the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
//for getting datatable at index
public function show(Request $request, $action){
$payments = payment::all();
return Datatables::of($payments)
->addColumn('action', function ($payment) {
return
'<a class="btn btn-success btn-sm" href="'.route('payment.create'/*,['id' => $payment->id]*/).'">
<i class="fa fa-search"></i> View</a>'.
'<form style="display:inline;" method="POST" action="'.
route('payment-method.destroy',['id' => $payment->id]).'">'.method_field('DELETE').csrf_field().
'<button type="button" class="btn btn-danger btn-sm" onclick="removeList($(this))"><i class="fa fa-remove"></i> Remove</button></form>';
})
->rawColumns(['status', 'action'])
->make(true);
}
/**
* Show the form for editing the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
//view form edit
public function edit()
{
return view('panel.order-management.payment.form-edit');
}
/**
* Update the specified resource in storage.
*
* @param \Illuminate\Http\Request $request
* @param int $id
* @return \Illuminate\Http\Response
*/
//update data payment
public function update(Request $request, $id)
{
$payment = PaymentMethod::find($id);
$payment->name = $request->name;
$payment->norek = $request->norek;
$payment->save();
return redirect()->route('payment-method.index')->with('update', 'Payment Method updated!');
}
/**
* Remove the specified resource from storage.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
//delete data payment
public function destroy($id)
{
$payment = PaymentMethod::find($id);
$payment->delete();
return redirect()->route('payment-method.index')->with('dlt', 'Payment Method deleted!');
}
}
| 28.632
| 161
| 0.568315
|
f877fb05ff1bfc8e97069f590cff703a2ea4fd1a
| 1,480
|
rs
|
Rust
|
src/base/stage.rs
|
mitrid-labs/mitrid-core
|
e28f237ea52cf0421485a1082636d469db4528de
|
[
"Apache-2.0"
] | 4
|
2018-09-30T23:16:37.000Z
|
2020-02-13T15:57:46.000Z
|
src/base/stage.rs
|
mitrid-labs/mitrid-core
|
e28f237ea52cf0421485a1082636d469db4528de
|
[
"Apache-2.0"
] | null | null | null |
src/base/stage.rs
|
mitrid-labs/mitrid-core
|
e28f237ea52cf0421485a1082636d469db4528de
|
[
"Apache-2.0"
] | null | null | null |
//! # Stage
//!
//! `stage` is the module providing the type describing the distributed ledger stage (development,
//! testing or production).
use std::fmt;
use base::Result;
use base::Checkable;
use base::Sizable;
use base::Datable;
use base::Serializable;
/// Enum representing the distributed ledger stage (development, testing or production).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash, Serialize, Deserialize)]
#[repr(u8)]
pub enum Stage {
/// Development stage.
Development,
/// Testing stage.
Testing,
/// Production stage.
Production,
}
impl Stage {
/// Parses a `Stage` from a `&str`.
pub fn parse(s: &str) -> Result<Stage> {
match s {
"development" => Ok(Stage::Development),
"testing" => Ok(Stage::Testing),
"production" => Ok(Stage::Production),
_ => Err("unknown stage".into())
}
}
}
impl fmt::Display for Stage {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Stage::Development => write!(f, "development"),
Stage::Testing => write!(f, "testing"),
Stage::Production => write!(f, "production"),
}
}
}
impl Default for Stage {
fn default() -> Stage {
Stage::Development
}
}
impl Sizable for Stage {
fn size(&self) -> u64 {
0u8.size()
}
}
impl Checkable for Stage {}
impl Serializable for Stage {}
impl Datable for Stage {}
| 23.125
| 98
| 0.593243
|
df46e5931b022e12e2e974643b8b189b51607a4c
| 2,482
|
cs
|
C#
|
Server/Repository/Perform.cs
|
enviriot/EnviriotSW
|
807d64805c4a71328cfc5ba6052b0f2eee18b33d
|
[
"MIT"
] | null | null | null |
Server/Repository/Perform.cs
|
enviriot/EnviriotSW
|
807d64805c4a71328cfc5ba6052b0f2eee18b33d
|
[
"MIT"
] | null | null | null |
Server/Repository/Perform.cs
|
enviriot/EnviriotSW
|
807d64805c4a71328cfc5ba6052b0f2eee18b33d
|
[
"MIT"
] | 1
|
2017-04-20T01:53:55.000Z
|
2017-04-20T01:53:55.000Z
|
///<remarks>This file is part of the <see cref="https://github.com/enviriot">Enviriot</see> project.<remarks>
using LiteDB;
using NiL.JS.Core;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace X13.Repository {
/// <summary>
/// Describes a single pending operation (create/move/set/changed/remove/subscribe)
/// on a <see cref="Topic"/>. Instances are created through the static factories and
/// ordered for processing via <see cref="CompareTo"/>.
/// </summary>
public class Perform : IComparable<Perform> {
    // Factory for operations that carry no payload (e.g. move/create/remove).
    internal static Perform Create(Topic src, Art art, Topic prim) {
        Perform r;
        r = new Perform(art, src, prim);
        r.o = null;
        r.i = 0;
        return r;
    }
    // Factory for a state change: val becomes the new state payload.
    internal static Perform Create(Topic src, JSValue val, Topic prim) {
        Perform r;
        r = new Perform(Art.setState, src, prim);
        r.o = val;
        r.i = 0;
        return r;
    }
    // Factory for a field change: fName is the field path, val the new field value.
    internal static Perform Create(Topic src, string fName, JSValue val, Topic prim) {
        Perform r;
        r = new Perform(Art.setField, src, prim);
        r.o = fName;
        r.f_v = val;
        r.i = 0;
        return r;
    }
    internal object o;      // payload: new state value, field name, or null depending on art
    internal int i;         // NOTE(review): always initialized to 0 here; its meaning lives in the queue code — confirm there
    internal object old_o;  // NOTE(review): presumably the previous payload, assigned outside this class — confirm
    internal JSValue f_v;   // new field value for setField/changedField operations
    public readonly Topic src;                // topic the operation applies to
    public Topic prim { get; internal set; }  // originating ("primary") topic passed by the caller
    public Art art { get; internal set; }     // kind of operation
    // Field path for changedField operations; null for every other art.
    public string FieldPath { get { return this.art == Art.changedField ? (o as string) : null; } }
    private Perform(Art art, Topic src, Topic prim) {
        this.src = src;
        this.art = art;
        this.prim = prim;
    }
    // True when this and other are state operations on the same topic in the
    // same priority group (art >> 2), i.e. one supersedes the other.
    internal bool EqualsGr(Perform other) {
        return (this.art == Art.setState || this.art == Art.changedState)
            && other != null
            && this.src == other.src
            && (((int)this.art) >> 2) == (((int)other.art) >> 2);
    }
    /// <summary>
    /// Orders operations by priority group (art >> 2). State operations on the same
    /// topic compare equal; everything else returns -1 so that a stable sort keeps
    /// same-priority operations in arrival order. Deliberately not a total order.
    /// </summary>
    public int CompareTo(Perform other) {
        if(other == null) {
            return -1;
        }
        int p1 = ((int)this.art) >> 2;
        int p2 = (int)(other.art) >> 2;
        if(p1 != p2) {
            return p1.CompareTo(p2);
        }
        if(this.src == other.src && (this.art == Art.setState || this.art == Art.changedState)) {
            return 0;
        }
        return -1; // arrival order is preserved (translated from the original Russian comment)
    }
    public override string ToString() {
        return string.Concat(src.path, "[", art.ToString(), "]=", o == null ? "null" : o.ToString());
    }
    // Operation kind; numeric values are grouped so that (value >> 2) yields the
    // processing priority used by CompareTo/EqualsGr.
    public enum Art {
        move = 1,
        create = 2,
        subscribe = 4,
        unsubscribe = 8,
        setField = 12,
        changedField = 14,
        setState = 16,
        changedState = 18,
        remove = 20,
        subAck = 24,
    }
}
}
| 28.204545
| 111
| 0.547542
|
254848702bc11c21435115c49fd4ada1113c2b4a
| 962
|
cs
|
C#
|
4.OOPBasic/01_DefiningClasses/Exercise_02_DateModifier/DateModifier.cs
|
Brankovanov/SoftUniCourses
|
39648a863c29e00057a6969c63b19cfc7cfb8f07
|
[
"MIT"
] | 1
|
2018-07-17T07:43:49.000Z
|
2018-07-17T07:43:49.000Z
|
4.OOPBasic/01_DefiningClasses/Exercise_02_DateModifier/DateModifier.cs
|
Brankovanov/SoftUniCourses
|
39648a863c29e00057a6969c63b19cfc7cfb8f07
|
[
"MIT"
] | null | null | null |
4.OOPBasic/01_DefiningClasses/Exercise_02_DateModifier/DateModifier.cs
|
Brankovanov/SoftUniCourses
|
39648a863c29e00057a6969c63b19cfc7cfb8f07
|
[
"MIT"
] | null | null | null |
using System;
namespace Exercise_02_DateModifier
{
/// <summary>
/// Records a starting and an ending date (kept as strings) and can compute the
/// number of whole days between any two given dates.
/// </summary>
public class DateModifier
{
    // Auto-properties replace the original explicit backing fields;
    // the public surface (read/write string properties) is unchanged.
    public string Start { get; set; }

    public string End { get; set; }

    /// <summary>Creates a modifier holding the given start and end dates.</summary>
    public DateModifier(string start, string end)
    {
        Start = start;
        End = end;
    }

    /// <summary>
    /// Returns the absolute number of days between <paramref name="start"/> and
    /// <paramref name="end"/>; argument order does not matter.
    /// </summary>
    public double CountDays(string start, string end)
    {
        DateTime first = Convert.ToDateTime(start);
        DateTime second = Convert.ToDateTime(end);
        return Math.Abs((first - second).TotalDays);
    }
}
}
| 20.913043
| 93
| 0.452183
|
02ed6979d1cd650803fe221fcffa0620886a7dd7
| 10,872
|
psm1
|
PowerShell
|
DSCResources/MSFT_WebApplicationProxy/MSFT_WebApplicationProxy.psm1
|
X-Guardian/WebApplicationProxyDsc
|
006f4aee96a022bcbdeb32d1141acb961576d182
|
[
"MIT"
] | 1
|
2020-06-03T16:29:11.000Z
|
2020-06-03T16:29:11.000Z
|
DSCResources/MSFT_WebApplicationProxy/MSFT_WebApplicationProxy.psm1
|
X-Guardian/WebApplicationProxyDsc
|
006f4aee96a022bcbdeb32d1141acb961576d182
|
[
"MIT"
] | null | null | null |
DSCResources/MSFT_WebApplicationProxy/MSFT_WebApplicationProxy.psm1
|
X-Guardian/WebApplicationProxyDsc
|
006f4aee96a022bcbdeb32d1141acb961576d182
|
[
"MIT"
] | null | null | null |
<#
.SYNOPSIS
DSC module for the Web Application Proxy resource
.DESCRIPTION
The WebApplicationProxy DSC resource manages the installation of the Web Application Proxy server role. To
further manage the configuration of WebApplicationProxy, the WebApplicationProxyConfiguration DSC resource
should be used.
Note: removal of the Web Application Proxy role using this resource is not supported. Remove the
Web-Application-Proxy role from the server instead.
.PARAMETER FederationServiceName
Key - String
Specifies the name of a Federation Service. This is the Federation Service for which Web Application Proxy
provides AD FS proxy functionality and stores the configuration of the Federation Service.
.PARAMETER CertificateThumbprint
Required - String
Specifies the certificate thumbprint of the certificate that Web Application Proxy presents to users to
identify the Web Application Proxy as a proxy for the Federation Service. The certificate must be in the
Personal store for the local computer. You can use a simple certificate, a subject alternative name (SAN)
certificate, or a wildcard certificate.
.PARAMETER FederationServiceTrustCredential
Required - String
Specifies a PSCredential object that contains the credentials of the AD FS identity that is authorized to
register new Federation server proxies. Specify an account that has permissions to manage the Federation
Service.
.PARAMETER ForwardProxy
Write - String
Specifies the DNS name and port number of an HTTP proxy that this federation server proxy uses to obtain access
to the federation service. Specify the value for this parameter in the following format: FQDN:PortNumber.
Note: This parameter applies only to Federation Services proxy. It does not apply for application publishing.
.PARAMETER HttpsPort
Write - Sint32
Specifies the HTTPS port for the Web Application Proxy server. The default value is 443.
.PARAMETER TlsClientPort
Write - Sint32
Specifies the port for the TLS client. Web Application Proxy uses this port for user certificate
authentication. The default value is 49443.
#>
Set-StrictMode -Version 2.0
$script:dscModuleName = 'WebApplicationProxyDsc'
$script:PSModuleName = 'WebApplicationProxy'
$script:dscResourceName = [System.IO.Path]::GetFileNameWithoutExtension($MyInvocation.MyCommand.Name)
$script:resourceModulePath = Split-Path -Path (Split-Path -Path $PSScriptRoot -Parent) -Parent
$script:modulesFolderPath = Join-Path -Path $script:resourceModulePath -ChildPath 'Modules'
$script:localizationModulePath = Join-Path -Path $script:modulesFolderPath -ChildPath "$($script:DSCModuleName).Common"
Import-Module -Name (Join-Path -Path $script:localizationModulePath -ChildPath "$($script:dscModuleName).Common.psm1")
$script:localizedData = Get-LocalizedData -ResourceName $script:dscResourceName
function Get-TargetResource
{
    <#
    .SYNOPSIS
        Get-TargetResource
    .NOTES
        Used Resource PowerShell Cmdlets:
        - Get-WebApplicationProxySslCertificate - https://docs.microsoft.com/en-us/powershell/module/webapplicationproxy/get-webapplicationproxysslcertificate
    #>
    [CmdletBinding()]
    [OutputType([System.Collections.Hashtable])]
    param
    (
        [Parameter(Mandatory = $true)]
        [System.String]
        $FederationServiceName,

        [Parameter(Mandatory = $true)]
        [System.String]
        $CertificateThumbprint,

        [Parameter(Mandatory = $true)]
        [System.Management.Automation.PSCredential]
        $FederationServiceTrustCredential
    )

    # Check of the Resource PowerShell module is installed
    Assert-Module -ModuleName $script:PSModuleName

    Write-Verbose -Message ($script:localizedData.GettingResourceMessage -f $FederationServiceName)

    # Check if the Web Application Proxy service has been configured
    if ((Get-WebApplicationProxyConfigurationStatus) -eq 'Configured')
    {
        try
        {
            # The configured proxy service details are only exposed through WMI/CIM.
            $wapProxyService = Get-CimInstance -Namespace 'root/ADFS' `
                -ClassName 'ProxyService' -Verbose:$false
        }
        catch
        {
            $errorMessage = $script:localizedData.GettingWapProxyServiceError -f $FederationServiceName
            New-InvalidOperationException -Message $errorMessage -ErrorRecord $_
        }

        try
        {
            # Only the first SSL certificate binding is needed for the thumbprint.
            $sslCertificateInfo = Get-WebApplicationProxySslCertificate | Select-Object -First 1
        }
        catch
        {
            $errorMessage = $script:localizedData.GettingWapSslCertificateError -f $FederationServiceName
            New-InvalidOperationException -Message $errorMessage -ErrorRecord $_
        }

        if ($sslCertificateInfo)
        {
            $certificateThumbprint = $sslCertificateInfo.CertificateHash
        }
        else
        {
            # Same localized message is reused for the "no certificate returned" case.
            $errorMessage = $script:localizedData.GettingWapSslCertificateError -f $FederationServiceName
            New-InvalidOperationException -Message $errorMessage
        }

        $returnValue = @{
            FederationServiceName            = $wapProxyService.HostName
            CertificateThumbprint            = $certificateThumbprint
            # NOTE(review): PowerShell variable names are case-insensitive, so the
            # odd 'c' casing below still resolves to the parameter — harmless.
            FederationServiceTrustCredential = $FederationServiceTrustcredential
            ForwardProxy                     = $wapProxyService.ForwardHttpProxyAddress
            HttpsPort                        = $wapProxyService.HostHttpsPort
            TlsClientPort                    = $wapProxyService.TlsClientPort
            Ensure                           = 'Present'
        }
    }
    else
    {
        # Not configured: echo the caller-supplied key/required values back with Ensure = 'Absent'.
        Write-Verbose -Message ($script:localizedData.ResourceNotFoundMessage -f $FederationServiceName)

        $returnValue = @{
            FederationServiceName            = $FederationServiceName
            CertificateThumbprint            = $CertificateThumbprint
            FederationServiceTrustCredential = $FederationServiceTrustcredential
            ForwardProxy                     = $null
            HttpsPort                        = $null
            TlsClientPort                    = $null
            Ensure                           = 'Absent'
        }
    }

    $returnValue
}
function Set-TargetResource
{
    <#
    .SYNOPSIS
        Set-TargetResource
    .NOTES
        Used Resource PowerShell Cmdlets:
        - Install-WebApplicationProxy - https://docs.microsoft.com/en-us/powershell/module/webapplicationproxy/install-webapplicationproxy

        Install-WebApplicationProxy returns a [Microsoft.IdentityServer.Deployment.Core.Result] object with
        the following properties:

            Context - string
            Message - string
            Status - Microsoft.IdentityServer.Deployment.Core.ResultStatus

        Examples:

            Message : The configuration completed successfully.
            Context : DeploymentSucceeded
            Status : Success
    #>
    [CmdletBinding()]
    param
    (
        [Parameter(Mandatory = $true)]
        [System.String]
        $FederationServiceName,

        [Parameter(Mandatory = $true)]
        [System.String]
        $CertificateThumbprint,

        [Parameter(Mandatory = $true)]
        [System.Management.Automation.PSCredential]
        $FederationServiceTrustCredential,

        [Parameter()]
        [System.String]
        $ForwardProxy,

        [Parameter()]
        [System.Int32]
        $HttpsPort,

        [Parameter()]
        [System.Int32]
        $TlsClientPort
    )

    # Remove any parameters not used in Splats
    [HashTable]$parameters = $PSBoundParameters
    $parameters.Remove('Verbose')

    $getTargetResourceParms = @{
        FederationServiceName            = $FederationServiceName
        CertificateThumbprint            = $CertificateThumbprint
        FederationServiceTrustCredential = $FederationServiceTrustCredential
    }
    $targetResource = Get-TargetResource @getTargetResourceParms

    # Web Application Proxy Service not installed
    if ($targetResource.Ensure -eq 'Absent')
    {
        try
        {
            Write-Verbose -Message ($script:localizedData.InstallingResourceMessage -f $FederationServiceName)
            # NOTE(review): -ErrorAction SilentlyContinue suppresses non-terminating
            # errors, so the catch below only fires on terminating errors and
            # $Result may be $null on failure — confirm this is intentional.
            $Result = Install-WebApplicationProxy @parameters -ErrorAction SilentlyContinue
        }
        catch
        {
            $errorMessage = $script:localizedData.InstallationError -f $FederationServiceName
            New-InvalidOperationException -Message $errorMessage -ErrorRecord $_
        }

        if ($Result.Status -eq 'Success')
        {
            Write-Verbose -Message ($script:localizedData.ResourceInstallSuccessMessage -f $FederationServiceName)
        }
        else
        {
            New-InvalidOperationException -Message $Result.Message
        }
    }
    # When already configured nothing is done; removal of the role is not
    # supported by this resource (see the module header).
}
function Test-TargetResource
{
    <#
    .SYNOPSIS
        Test-TargetResource
    .DESCRIPTION
        Returns $true when the Web Application Proxy is already configured
        (Get-TargetResource reports Ensure = 'Present'), $false otherwise.
    #>
    [CmdletBinding()]
    [OutputType([System.Boolean])]
    param
    (
        [Parameter(Mandatory = $true)]
        [System.String]
        $FederationServiceName,

        [Parameter(Mandatory = $true)]
        [System.String]
        $CertificateThumbprint,

        [Parameter(Mandatory = $true)]
        [System.Management.Automation.PSCredential]
        $FederationServiceTrustCredential,

        [Parameter()]
        [System.String]
        $ForwardProxy,

        [Parameter()]
        [System.Int32]
        $HttpsPort,

        [Parameter()]
        [System.Int32]
        $TlsClientPort
    )

    Write-Verbose -Message ($script:localizedData.TestingResourceMessage -f $FederationServiceName)

    # Only the key/required properties are needed to locate the resource.
    $targetResource = Get-TargetResource `
        -FederationServiceName $FederationServiceName `
        -CertificateThumbprint $CertificateThumbprint `
        -FederationServiceTrustCredential $FederationServiceTrustCredential

    $inDesiredState = $targetResource.Ensure -eq 'Present'

    if ($inDesiredState)
    {
        # Resource is in desired state
        Write-Verbose -Message ($script:localizedData.ResourceInDesiredStateMessage -f
            $targetResource.FederationServiceName)
    }
    else
    {
        Write-Verbose -Message ($script:localizedData.ResourceNotInDesiredStateMessage -f $FederationServiceName)
    }

    $inDesiredState
}
Export-ModuleMember -Function *-TargetResource
| 35.529412
| 159
| 0.643672
|
12aac4c00fa9941e5cf940c313961de1ddc65995
| 2,312
|
cs
|
C#
|
ImageViewer/StudyManagement/Core/Storage/DicomQuery/QueryUtilities.cs
|
SNBnani/Xian
|
e07cb943476705ac3721921cf0f0906485d9f59d
|
[
"Apache-2.0"
] | 1
|
2019-02-18T11:41:46.000Z
|
2019-02-18T11:41:46.000Z
|
ImageViewer/StudyManagement/Core/Storage/DicomQuery/QueryUtilities.cs
|
nhannd/Xian
|
e07cb943476705ac3721921cf0f0906485d9f59d
|
[
"Apache-2.0"
] | null | null | null |
ImageViewer/StudyManagement/Core/Storage/DicomQuery/QueryUtilities.cs
|
nhannd/Xian
|
e07cb943476705ac3721921cf0f0906485d9f59d
|
[
"Apache-2.0"
] | null | null | null |
using System;
using System.Linq;
using System.Text.RegularExpressions;
using ClearCanvas.Dicom;
namespace ClearCanvas.ImageViewer.StudyManagement.Core.Storage.DicomQuery
{
/// <summary>
/// Helpers for evaluating DICOM query criteria: wildcard eligibility,
/// wildcard matching, equality and multi-value detection.
/// </summary>
internal static class QueryUtilities
{
    // VRs that DICOM defines as not wildcard-matchable; any wildcard characters
    // present in criteria for these VRs are therefore literal.
    private static readonly string[] WildcardExcludedVRs = { "DA", "TM", "DT", "SL", "SS", "US", "UL", "FL", "FD", "OB", "OW", "UN", "AT", "DS", "IS", "AS", "UI" };

    // True when the given VR participates in wildcard matching at all.
    internal static bool IsWildcardCriterionAllowed(DicomVr vr)
    {
        return Array.IndexOf(WildcardExcludedVRs, vr.Name) < 0;
    }

    // True when the criterion is non-empty, the VR allows wildcards, and the
    // criterion actually contains a wildcard character ('*' or '?').
    internal static bool IsWildcardCriterion(DicomVr vr, string criterion)
    {
        if (String.IsNullOrEmpty(criterion) || !IsWildcardCriterionAllowed(vr))
            return false;

        return criterion.IndexOf('*') >= 0 || criterion.IndexOf('?') >= 0;
    }

    internal static bool IsLike(string value, string criterion)
    {
        // Translate DICOM wildcards into a regex anchored at the start:
        // '*' -> zero or more characters, '?' -> exactly one character.
        string pattern = "^" + criterion.Replace("*", ".*").Replace("?", ".");

        // DICOM says an object with no value counts as a match, but that makes
        // little practical sense, so we don't do it. DICOM also mandates
        // case-sensitive matching, but that's just silly.
        return Regex.IsMatch(value, pattern, RegexOptions.IgnoreCase);
    }

    internal static bool AreEqual(string value, string criterion)
    {
        // Same deliberate deviations from DICOM as IsLike: empty values do not
        // auto-match, and the comparison is case-insensitive.
        return string.Equals(value, criterion, StringComparison.InvariantCultureIgnoreCase);
    }

    internal static bool IsMultiValued(string value)
    {
        // Multi-valued DICOM attributes separate items with a backslash.
        return !String.IsNullOrEmpty(value) && value.IndexOf('\\') >= 0;
    }
}
}
| 41.285714
| 169
| 0.601644
|
e73cb68f5a3f5dd48e7dc50dba0beb24492f91d0
| 3,509
|
ps1
|
PowerShell
|
src/06_ScheduledTasks.ps1
|
DaXiong-Safe/PrivescCheck
|
16cd833d12fef5661a7b13487925e5a05e853a65
|
[
"BSD-3-Clause"
] | null | null | null |
src/06_ScheduledTasks.ps1
|
DaXiong-Safe/PrivescCheck
|
16cd833d12fef5661a7b13487925e5a05e853a65
|
[
"BSD-3-Clause"
] | null | null | null |
src/06_ScheduledTasks.ps1
|
DaXiong-Safe/PrivescCheck
|
16cd833d12fef5661a7b13487925e5a05e853a65
|
[
"BSD-3-Clause"
] | null | null | null |
function Invoke-ScheduledTasksImagePermissionsCheck {
    <#
    .SYNOPSIS
    Enumrates scheduled tasks with a modifiable path

    Author: @itm4n
    License: BSD 3-Clause

    .DESCRIPTION
    This function enumerates all the scheduled tasks which are visible by the current user but are not owned by the current user. For each task, it extracts the command line and checks whether it contains a path pointing to a modifiable file. If a task is run as the current user, it is filtered out.

    .EXAMPLE
    PS C:\> Invoke-ScheduledTasksImagePermissionsCheck

    TaskName           : DummyTask
    TaskPath           : \CustomTasks\DummyTask
    TaskFile           : C:\Windows\System32\Tasks\CustomTasks\DummyTask
    RunAs              : NT AUTHORITY\SYSTEM
    Command            : C:\APPS\MyTask.exe
    CurrentUserIsOwner : False
    ModifiablePath     : C:\APPS\
    IdentityReference  : NT AUTHORITY\Authenticated Users
    Permissions        : {Delete, WriteAttributes, Synchronize, ReadControl...}
    #>
    [CmdletBinding()] Param()

    # Tasks owned by the current user are of no interest for privilege escalation.
    Get-ScheduledTaskList | Where-Object { -not $_.CurrentUserIsOwner } | ForEach-Object {

        $CurrentTask = $_

        # Check every path referenced by the task's command line for write access.
        $CurrentTask.Command | Get-ModifiablePath | Where-Object { $_ -and (-not [String]::IsNullOrEmpty($_.ModifiablePath)) } | ForEach-Object {

            # Clone the task object so each modifiable path yields its own result row.
            $ResultItem = $CurrentTask.PsObject.Copy()
            $ResultItem | Add-Member -MemberType "NoteProperty" -Name "ModifiablePath" -Value $_.ModifiablePath
            $ResultItem | Add-Member -MemberType "NoteProperty" -Name "IdentityReference" -Value $_.IdentityReference
            $ResultItem | Add-Member -MemberType "NoteProperty" -Name "Permissions" -Value $_.Permissions
            $ResultItem
        }
    }
}
function Invoke-ScheduledTasksUnquotedPathCheck {
    <#
    .SYNOPSIS
    Enumerates scheduled tasks with an exploitable unquoted path

    Author: @itm4n
    License: BSD 3-Clause

    .DESCRIPTION
    This script first enumerates all the tasks that are visible to the current user. Then, it checks the 'Command' value to see if it is not surrounded by quotes (unquoted path). If so, it checks whether the path contains spaces and if one of the intermediate directories is exploitable. Note that, as a low privileged user, not all the tasks are visible.

    .EXAMPLE
    PS C:\> Invoke-ScheduledTasksUnquotedPathCheck

    TaskName           : VulnTask
    TaskPath           : \CustomTasks\VulnTask
    TaskFile           : C:\WINDOWS\System32\Tasks\CustomTasks\VulnTask
    RunAs              : NT AUTHORITY\SYSTEM
    Command            : C:\APPS\Custom Tasks\task.exe
    CurrentUserIsOwner : False
    ModifiablePath     : C:\APPS
    IdentityReference  : NT AUTHORITY\Authenticated Users
    Permissions        : {Delete, WriteAttributes, Synchronize, ReadControl...}
    #>
    [CmdletBinding()] Param()

    # Tasks owned by the current user are of no interest for privilege escalation.
    Get-ScheduledTaskList | Where-Object { $_.CurrentUserIsOwner -eq $false} | ForEach-Object {

        $CurrentTask = $_

        # Analyze the task's command line for an exploitable unquoted path.
        Get-ExploitableUnquotedPath -Path $CurrentTask.Command | ForEach-Object {

            # Clone the task object so each finding yields its own result row.
            $ResultItem = $CurrentTask.PsObject.Copy()
            $ResultItem | Add-Member -MemberType "NoteProperty" -Name "ModifiablePath" -Value $_.ModifiablePath
            $ResultItem | Add-Member -MemberType "NoteProperty" -Name "IdentityReference" -Value $_.IdentityReference
            $ResultItem | Add-Member -MemberType "NoteProperty" -Name "Permissions" -Value $_.Permissions
            $ResultItem
        }
    }
}
| 40.333333
| 355
| 0.677401
|
da1384239e592320a5419a4a8be28b9443e7acbd
| 5,808
|
php
|
PHP
|
Tests/Functional/Manager/MenuUpdateManagerTest.php
|
oro-subtree/NavigationBundle
|
315b23e76d783aed8af00398ac3651d52c1e27ce
|
[
"MIT"
] | null | null | null |
Tests/Functional/Manager/MenuUpdateManagerTest.php
|
oro-subtree/NavigationBundle
|
315b23e76d783aed8af00398ac3651d52c1e27ce
|
[
"MIT"
] | null | null | null |
Tests/Functional/Manager/MenuUpdateManagerTest.php
|
oro-subtree/NavigationBundle
|
315b23e76d783aed8af00398ac3651d52c1e27ce
|
[
"MIT"
] | null | null | null |
<?php
namespace Oro\Bundle\NavigationBundle\Tests\Functional\Manager;
use Doctrine\ORM\EntityManager;
use Doctrine\ORM\EntityRepository;
use Oro\Component\Testing\Unit\EntityTrait;
use Oro\Bundle\NavigationBundle\Entity\MenuUpdate;
use Oro\Bundle\NavigationBundle\Manager\MenuUpdateManager;
use Oro\Bundle\NavigationBundle\Tests\Functional\DataFixtures\LoadMenuUpdateData;
use Oro\Bundle\TestFrameworkBundle\Test\WebTestCase;
/**
* @dbIsolation
*/
/**
 * Functional tests for MenuUpdateManager: scope-aware retrieval, show/hide,
 * reset and move operations on menu updates, verified against the repository.
 */
class MenuUpdateManagerTest extends WebTestCase
{
    use EntityTrait;

    const MENU_NAME = 'application_menu';

    /** @var EntityManager */
    protected $em;

    /** @var EntityRepository */
    protected $repository;

    /** @var MenuUpdateManager */
    protected $manager;

    /**
     * {@inheritdoc}
     */
    protected function setUp()
    {
        $this->initClient([], $this->generateBasicAuthHeader());
        $this->loadFixtures(
            [
                'Oro\Bundle\NavigationBundle\Tests\Functional\DataFixtures\LoadMenuUpdateData'
            ]
        );

        $this->manager = $this->getContainer()->get('oro_navigation.manager.menu_update_default');
        $this->em = $this->getContainer()->get('doctrine')->getManagerForClass('OroNavigationBundle:MenuUpdate');
        $this->repository = $this->em->getRepository('OroNavigationBundle:MenuUpdate');
    }

    /** Global scope returns the globally-owned updates loaded by the fixture. */
    public function testGetMenuUpdatesByMenuAndScopeOwnershipGlobal()
    {
        $updates = $this->manager->getMenuUpdatesByMenuAndScope(self::MENU_NAME, 'global', 0);
        $this->assertCount(5, $updates);
    }

    /** User scope returns only the updates owned by that specific user. */
    public function testGetMenuUpdatesByMenuAndScopeOwnershipUser()
    {
        $updates = $this->manager->getMenuUpdatesByMenuAndScope(
            self::MENU_NAME,
            'user',
            $this->getReference('simple_user')->getId()
        );
        $this->assertCount(2, $updates);
    }

    /** Manager lookup by key must match a direct repository query (global scope). */
    public function testGetMenuUpdateByKeyAndScopeGlobal()
    {
        $update = $this->manager->getMenuUpdateByKeyAndScope(
            self::MENU_NAME,
            LoadMenuUpdateData::MENU_UPDATE_1,
            'global',
            0
        );

        $result = $this->repository
            ->findOneBy(
                [
                    'menu' => self::MENU_NAME,
                    'key' => LoadMenuUpdateData::MENU_UPDATE_1,
                    'ownershipType' => 'global',
                    'ownerId' => 0
                ]
            );

        $this->assertEquals($result, $update);
    }

    /** Manager lookup by key must match a direct repository query (user scope). */
    public function testGetMenuUpdateByKeyAndScopeUser()
    {
        $update = $this->manager->getMenuUpdateByKeyAndScope(
            self::MENU_NAME,
            LoadMenuUpdateData::MENU_UPDATE_3,
            'user',
            $this->getReference('simple_user')->getId()
        );

        $result = $this->repository
            ->findOneBy(
                [
                    'menu' => self::MENU_NAME,
                    'key' => LoadMenuUpdateData::MENU_UPDATE_3,
                    'ownershipType' => 'user',
                    'ownerId' => $this->getReference('simple_user')->getId()
                ]
            );

        $this->assertEquals($result, $update);
    }

    /** Showing an item activates every update queried below (item, parent, child). */
    public function testShowMenuItem()
    {
        $this->manager->showMenuItem(self::MENU_NAME, LoadMenuUpdateData::MENU_UPDATE_2_1, 'global', 0);

        /** @var MenuUpdate[] $result */
        $result = $this->repository
            ->findBy(
                [
                    'menu' => self::MENU_NAME,
                    'key' => [
                        LoadMenuUpdateData::MENU_UPDATE_2,
                        LoadMenuUpdateData::MENU_UPDATE_2_1,
                        LoadMenuUpdateData::MENU_UPDATE_2_1_1
                    ],
                    'ownershipType' => 'global',
                    'ownerId' => 0
                ]
            );

        foreach ($result as $entity) {
            $this->assertTrue($entity->isActive());
        }
    }

    /** Hiding an item deactivates both updates queried below (item and its child). */
    public function testHideMenuItem()
    {
        $this->manager->hideMenuItem(self::MENU_NAME, LoadMenuUpdateData::MENU_UPDATE_1, 'global', 0);

        /** @var MenuUpdate[] $result */
        $result = $this->repository
            ->findBy(
                [
                    'menu' => self::MENU_NAME,
                    'key' => [LoadMenuUpdateData::MENU_UPDATE_1, LoadMenuUpdateData::MENU_UPDATE_1_1],
                    'ownershipType' => 'global',
                    'ownerId' => 0
                ]
            );

        foreach ($result as $entity) {
            $this->assertFalse($entity->isActive());
        }
    }

    /** Resetting a scope deletes every update owned by that scope. */
    public function testResetMenuUpdatesWithOwnershipType()
    {
        $this->manager->resetMenuUpdatesWithOwnershipType('global', 0, self::MENU_NAME);

        /** @var MenuUpdate[] $result */
        $result = $this->repository
            ->findBy(
                [
                    'menu' => self::MENU_NAME,
                    'ownershipType' => 'global',
                    'ownerId' => 0
                ]
            );

        $this->assertCount(0, $result);
    }

    /** Moving an item re-parents it and renumbers the affected sibling priorities. */
    public function testMoveMenuItem()
    {
        $updates = $this->manager->moveMenuItem(
            self::MENU_NAME,
            LoadMenuUpdateData::MENU_UPDATE_3_1,
            'global',
            0,
            LoadMenuUpdateData::MENU_UPDATE_2,
            0
        );

        $this->assertCount(2, $updates);

        $this->assertEquals(1, $updates[0]->getPriority());
        $this->assertEquals(LoadMenuUpdateData::MENU_UPDATE_3_1, $updates[0]->getKey());
        $this->assertEquals(LoadMenuUpdateData::MENU_UPDATE_2, $updates[0]->getParentKey());

        $this->assertEquals(LoadMenuUpdateData::MENU_UPDATE_2_1, $updates[1]->getKey());
        $this->assertEquals(2, $updates[1]->getPriority());
    }
}
| 29.938144
| 113
| 0.550103
|
394c077ecd3b3d0ef9f9736e079ba86826d27ce8
| 476
|
py
|
Python
|
src/domain/Submittion/usecase/submittion_repository.py
|
fossabot/daizu-online-judge-backend
|
34e8ad4fe99374056046cf400d038423db12549c
|
[
"MIT"
] | null | null | null |
src/domain/Submittion/usecase/submittion_repository.py
|
fossabot/daizu-online-judge-backend
|
34e8ad4fe99374056046cf400d038423db12549c
|
[
"MIT"
] | null | null | null |
src/domain/Submittion/usecase/submittion_repository.py
|
fossabot/daizu-online-judge-backend
|
34e8ad4fe99374056046cf400d038423db12549c
|
[
"MIT"
] | null | null | null |
from abc import ABC, abstractmethod
from typing import List
from domain.Submittion.submittion import Submittion
class SubmittionRepository(ABC):
    """Abstract persistence boundary for ``Submittion`` records.

    Concrete repositories (e.g. SQL-backed) must implement every method;
    instantiating this class directly raises ``TypeError``.
    """

    @abstractmethod
    def store(self, submittion: Submittion) -> int:
        """Persist *submittion* and return its new identifier."""
        raise NotImplementedError

    @abstractmethod
    def find_all(self, problem_id: str) -> List[Submittion]:
        """Return every submission recorded for *problem_id*."""
        raise NotImplementedError

    @abstractmethod
    def find(self, submit_id) -> Submittion:
        """Return the single submission identified by *submit_id*."""
        raise NotImplementedError
| 25.052632
| 60
| 0.72479
|
c9d63a8a6b8ffe91ca1b9d4796eef8bbb67ca79c
| 629
|
ts
|
TypeScript
|
src/common/redux/__tests__/createListActions.spec.ts
|
olegsmetanin/ghnav
|
bee92b4a82e71d252fe5b6cccb6c4984a40a1a2f
|
[
"MIT"
] | null | null | null |
src/common/redux/__tests__/createListActions.spec.ts
|
olegsmetanin/ghnav
|
bee92b4a82e71d252fe5b6cccb6c4984a40a1a2f
|
[
"MIT"
] | null | null | null |
src/common/redux/__tests__/createListActions.spec.ts
|
olegsmetanin/ghnav
|
bee92b4a82e71d252fe5b6cccb6c4984a40a1a2f
|
[
"MIT"
] | null | null | null |
import { createListActions } from '../createListActions'
describe('createListActions', () => {
  it('works', () => {
    // Build the action bundle once and pull out the pieces under test.
    const listActions = createListActions('ISSUE')
    const { actionTypes, failure, load, loadSuccess } = listActions

    // Action type constants are namespaced with the entity prefix.
    expect(actionTypes.FAILURE).toEqual('ISSUE_LIST/FAILURE')

    // Each creator produces a plain action with the expected type and payload.
    expect(failure('error')).toEqual({
      type: 'ISSUE_LIST/FAILURE',
      error: 'error'
    })

    expect(load({ repo: 'repo' })).toEqual({
      type: 'ISSUE_LIST/LOAD',
      payload: { repo: 'repo' }
    })

    expect(loadSuccess({ repo: 'repo' })).toEqual({
      type: 'ISSUE_LIST/LOAD_SUCCESS',
      payload: { repo: 'repo' }
    })
  })
})
| 27.347826
| 74
| 0.589825
|
1f316079616750898d5c8e737a6b076f9eaead99
| 285
|
cs
|
C#
|
AWSCloudComputing.DataAccess/Customer.cs
|
jdrr2687/aws-cloud-computing-api
|
771cc21426eeb4e233a50260cc40eb1931d8d045
|
[
"MIT"
] | null | null | null |
AWSCloudComputing.DataAccess/Customer.cs
|
jdrr2687/aws-cloud-computing-api
|
771cc21426eeb4e233a50260cc40eb1931d8d045
|
[
"MIT"
] | null | null | null |
AWSCloudComputing.DataAccess/Customer.cs
|
jdrr2687/aws-cloud-computing-api
|
771cc21426eeb4e233a50260cc40eb1931d8d045
|
[
"MIT"
] | null | null | null |
using Dapper.Contrib.Extensions;
using System;
using System.Collections.Generic;
using System.Text;
namespace AWSCloudComputing.DataAccess
{
/// <summary>
/// Customer entity mapped (via Dapper.Contrib) to the Spanish-named "Cliente" table.
/// </summary>
[Table("Cliente")]
public class Customer
{
    // Primary key (a property named "Id" is treated as the key by Dapper.Contrib convention).
    public int Id { get; set; }

    // Customer name; "Nombre" preserved to match the underlying column name.
    public string Nombre { get; set; }
}
}
| 19
| 42
| 0.670175
|
741c21d8b5c30c0fcdc5297ee8130b13f132b3d0
| 2,090
|
css
|
CSS
|
dist/css/style.min.css
|
KamilaSieczkowska/lightbox_gallery
|
e168dc40b34ecc39d639a9225c6b5d2abe273194
|
[
"MIT"
] | null | null | null |
dist/css/style.min.css
|
KamilaSieczkowska/lightbox_gallery
|
e168dc40b34ecc39d639a9225c6b5d2abe273194
|
[
"MIT"
] | null | null | null |
dist/css/style.min.css
|
KamilaSieczkowska/lightbox_gallery
|
e168dc40b34ecc39d639a9225c6b5d2abe273194
|
[
"MIT"
] | null | null | null |
*{box-sizing:border-box}html,body{margin:0;width:100%;background:#cccca2}.container{max-width:260px;margin:0 auto}@media (min-width: 760px){.container{max-width:630px}}@media (min-width: 1024px){.container{max-width:900px}}.title{margin:0;padding:50px 0;font-family:'Amatic SC', cursive;text-decoration:underline;text-align:center;font-size:90px;line-height:90px;color:#800000}.gallery{margin-top:50px;width:100%;min-height:100vh;display:flex;justify-content:space-between;flex-wrap:wrap}.image{width:270px;height:340px;background:#FFF9F9;padding:10px;box-shadow:1px 1px 5px black;position:relative;margin-bottom:60px;filter:sepia(60%)}.image:before{content:'';display:inline-block;position:absolute;width:90px;height:25px;background-color:rgba(236,236,236,0.6);box-shadow:0 1px grey;top:-1px;left:-26px;transform:rotate(-32deg);z-index:2}.image:after{content:'';display:inline-block;position:absolute;width:90px;height:25px;background-color:rgba(236,236,236,0.6);box-shadow:0 1px grey;bottom:-3px;right:-21px;transform:rotate(-22deg);z-index:3}.img1{transform:rotate(-5deg)}.img2{transform:rotate(2deg)}.img3{transform:rotate(6deg)}.img4{transform:rotate(4deg)}.img5{transform:rotate(-1deg)}.img6{transform:rotate(3deg)}.photo{display:inline-block;width:250px;height:250px;filter:sepia(60%)}.photo:hover{filter:none}.description,h3{font-family:'Caveat', cursive;font-size:35px;margin:0;color:#000000;padding:10px 0}.lightbox{width:100%;height:100%;background:rgba(0,0,0,0.8);position:fixed;top:0;left:0;z-index:10000;display:flex;justify-content:center;align-items:center}.lightbox__container{min-width:200px;min-height:200px;background:#fff;box-shadow:0 2px 5px rgba(0,0,0,0.7);padding:20px;position:relative}.lightbox__close{position:absolute;right:-10px;top:-5px;width:40px;height:40px;background:#970000;border:none;font-size:20px;color:#fff;box-shadow:0 2px 5px 
rgba(0,0,0,0.7);cursor:pointer}.lightbox__img{max-height:80vh}img{max-height:80vh;max-width:100%}footer{text-align:center;padding:20px}.gitlink{text-decoration:none;color:black}
/*# sourceMappingURL=style.min.css.map */
| 522.5
| 2,046
| 0.791866
|
11774ea5774cf8494cfff460750a408f2fa2060a
| 1,312
|
swift
|
Swift
|
test/Interop/Cxx/enum/scoped-enums.swift
|
xjc90s/swift
|
cafe5ccbd1b7aa9cc9c837c5be2cdf3d5acd8a49
|
[
"Apache-2.0"
] | 1
|
2020-01-27T17:36:38.000Z
|
2020-01-27T17:36:38.000Z
|
test/Interop/Cxx/enum/scoped-enums.swift
|
xjc90s/swift
|
cafe5ccbd1b7aa9cc9c837c5be2cdf3d5acd8a49
|
[
"Apache-2.0"
] | null | null | null |
test/Interop/Cxx/enum/scoped-enums.swift
|
xjc90s/swift
|
cafe5ccbd1b7aa9cc9c837c5be2cdf3d5acd8a49
|
[
"Apache-2.0"
] | null | null | null |
// RUN: %target-run-simple-swift(-I %S/Inputs -Xfrontend -enable-experimental-cxx-interop)
// REQUIRES: executable_test
import ScopedEnums
import StdlibUnittest
// Test driver exercising C++ scoped enums (enum class) imported from the
// ScopedEnums header via the experimental C++ interop (see RUN line above).
var ScopedEnumsTestSuite = TestSuite("Scoped Enums")

// Same imported case compares equal to itself.
ScopedEnumsTestSuite.test("Make and compare") {
  let val: ScopedEnumDefined = .x
  expectEqual(val, .x)
}

// Distinct imported cases compare not-equal.
ScopedEnumsTestSuite.test("Make and compare (not equal)") {
  let val: ScopedEnumDefined = .x
  expectNotEqual(val, .y)
}

// Helper returning a fixed case; also checks the enum works as a return type.
func makeScopedEnumBasic() -> ScopedEnumBasic { .z }

ScopedEnumsTestSuite.test("Make and compare (ScopedEnumBasic)") {
  let val: ScopedEnumBasic = .x
  expectNotEqual(val, makeScopedEnumBasic())
  expectEqual(.z, makeScopedEnumBasic())
}

// Raw-value construction must map onto the declared C++ enumerator values.
ScopedEnumsTestSuite.test("Make and compare (ScopedEnumCharDefined)") {
  expectEqual(ScopedEnumCharDefined(rawValue: 2), .y)
  expectNotEqual(ScopedEnumCharDefined(rawValue: 2), ScopedEnumCharDefined(rawValue: 0))
}

// Negative enumerator values round-trip through rawValue correctly.
ScopedEnumsTestSuite.test("Make and compare (ScopedEnumNegativeElement)") {
  expectEqual(ScopedEnumNegativeElement(rawValue: -1), .x)
  expectNotEqual(ScopedEnumNegativeElement(rawValue: 0), .x)
}

// Enumerators following an explicit mid-enum value (42) continue from it (43).
ScopedEnumsTestSuite.test("Make and compare (MiddleDefinedScopedEnum)") {
  expectEqual(MiddleDefinedScopedEnum(rawValue: 42), .y)
  expectEqual(MiddleDefinedScopedEnum(rawValue: 43), .z)
}

runAllTests()
| 29.818182
| 90
| 0.771341
|
9203dcbd35e7119b919a1fc61feed3dee6655bcd
| 1,002
|
rb
|
Ruby
|
lib/identitee/loader.rb
|
natedavisolds/identify
|
e1dbf150ebd7bec34ad2ab530058b44e5d68aeca
|
[
"MIT"
] | 1
|
2017-04-21T13:22:21.000Z
|
2017-04-21T13:22:21.000Z
|
lib/identitee/loader.rb
|
natedavisolds/identify
|
e1dbf150ebd7bec34ad2ab530058b44e5d68aeca
|
[
"MIT"
] | null | null | null |
lib/identitee/loader.rb
|
natedavisolds/identify
|
e1dbf150ebd7bec34ad2ab530058b44e5d68aeca
|
[
"MIT"
] | null | null | null |
module Identitee
  # Lazily loads identity definition files ("<name>.rb") from a configurable
  # root directory; a full load is performed at most once per Loader instance.
  class Loader
    # options - Hash with :identify_root_directory, the base directory searched
    # for .rb files (nil resolves paths relative to the working directory).
    # NOTE: the caller's options hash is intentionally mutated (key is removed),
    # matching the original behaviour.
    def initialize options={}
      @identify_root_directory = options.delete(:identify_root_directory)
    end

    # True when "<root>/<filename>.rb" exists on disk.
    def loadable? filename
      # File.exists? was deprecated and removed in Ruby 3.2; File.exist? is
      # the supported spelling on every modern Ruby.
      File.exist? full_path(filename)
    end

    # Loads the file when present; otherwise yields to the optional fallback block.
    def lazy_load filename
      if loadable? filename
        force_load full_path(filename)
      else
        yield if block_given?
      end
    end

    # Loads every .rb file under the root directory, once per instance.
    def load_all
      if not_fully_loaded?
        force_full_load
      end
    end

    private

    attr_reader :identify_root_directory

    # Builds "<root>/<filename>.rb". Restores the interpolation of the basename
    # so the path round-trips with force_full_load's File.basename(..., ".*").
    def full_path filename
      [identify_root_directory, "#{filename}.rb"].compact.join('/')
    end

    # Marks the instance fully loaded, then lazy-loads each .rb file by basename.
    def force_full_load
      @fully_loaded = true
      Dir.glob("#{identify_root_directory}/*.rb").each do |file_name|
        lazy_load File.basename(file_name, ".*")
      end
    end

    # Kernel#load (re-)executes the file unconditionally.
    def force_load filename
      load filename.to_s
    end

    def loaded_already?
      @fully_loaded == true
    end

    def not_fully_loaded?
      not loaded_already?
    end
  end
end
| 18.90566
| 73
| 0.644711
|
27c904f95b693dc116d64c7b2efcfd74d0d889e5
| 1,487
|
html
|
HTML
|
Events/item-10.html
|
SebastinSanty/QuarkWebsite2016
|
a72d39fb80f107726fb354123b59f14ffb27784d
|
[
"Apache-2.0"
] | 1
|
2016-09-23T04:32:19.000Z
|
2016-09-23T04:32:19.000Z
|
Events/item-10.html
|
SebastinSanty/QuarkWebsite2016
|
a72d39fb80f107726fb354123b59f14ffb27784d
|
[
"Apache-2.0"
] | null | null | null |
Events/item-10.html
|
SebastinSanty/QuarkWebsite2016
|
a72d39fb80f107726fb354123b59f14ffb27784d
|
[
"Apache-2.0"
] | 2
|
2017-10-15T16:28:22.000Z
|
2019-06-01T07:36:04.000Z
|
<!doctype html>
<html lang="en" class="no-js">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <link rel="stylesheet" href="css/reset.css"> <!-- CSS reset -->
    <link rel="stylesheet" href="css/style.css"> <!-- Resource style -->
    <script src="js/modernizr.js"></script> <!-- Modernizr -->
    <title>BITSMUN</title>
</head>
<body>
    <div class="cd-fold-content single-page">
        <h2>BITSMUN</h2>
        <p>
            BITSMUN is one of the fastest growing MUN conferences that takes place at India’s most sought-after holiday destination. One of the very few Indian MUNs that sees international participation, BITSMUN 2015 saw delegates coming all the way down to Goa from 9 different countries.
        </p>
        <br>
        <p>
            One of the most celebrated MUNs in India, the conference spans over a period of 3 days,where participants showcase their exemplary communication skills, be it first timers or seasoned MUNers from all over India. Every day at BITSMUN is unique in itself, with fantastic debate and social events. BITSMUN’14 saw a participation of over 350 delegates, making it one of the largest MUNs in the country.
        </p>
        <br>
        <p>
            Register for BITSMUN 2016 at:
        </p>
        <br>
        <br>
        <a href="http://www.bitsmungoa.in" target="_blank">
            <button id="down" type="button" class="btn btn-primary">
                Register Here
            </button>
        </a>
    </div>
    <!-- Fix: removed a stray unmatched </div> and the duplicated </body>
         tag; the script tags now sit inside the single body element. -->
    <script src="js/jquery-2.1.1.js"></script>
    <script src="js/main.js"></script> <!-- Resource jQuery -->
</body>
</html>
| 36.268293
| 398
| 0.712172
|
d5851d8dab2dc1b85ab1c509b35acc8ab381964b
| 79
|
sql
|
SQL
|
localization/bitcoin.sql
|
DeshiLeaksBD/esx_bitcoin-master
|
2d8683b1b015f145deb4b1a2cc74fdb09195bb8a
|
[
"MIT"
] | null | null | null |
localization/bitcoin.sql
|
DeshiLeaksBD/esx_bitcoin-master
|
2d8683b1b015f145deb4b1a2cc74fdb09195bb8a
|
[
"MIT"
] | null | null | null |
localization/bitcoin.sql
|
DeshiLeaksBD/esx_bitcoin-master
|
2d8683b1b015f145deb4b1a2cc74fdb09195bb8a
|
[
"MIT"
] | null | null | null |
-- Seed the 'bitcoin' inventory item.
-- NOTE(review): `limit` is presumably the maximum carry/stack amount —
-- confirm against the `items` table schema.
INSERT INTO `items` (name, label, `limit`) VALUES
('bitcoin', 'Bitcoin', 50)
;
| 26.333333
| 49
| 0.64557
|
ddd2025241b7ea35cce88eda76705bda3768d968
| 2,048
|
java
|
Java
|
plugins_logicrepository/cfg/src/test/java/com/runtimeverification/rvmonitor/logicrepository/plugins/cfg/DeepCopyTest.java
|
mylibero/rv-monitor
|
6997a4b06e25fab1a994e32b2fef1a908bb12830
|
[
"MIT"
] | 15
|
2017-10-03T00:14:23.000Z
|
2021-03-25T20:19:48.000Z
|
plugins_logicrepository/cfg/src/test/java/com/runtimeverification/rvmonitor/logicrepository/plugins/cfg/DeepCopyTest.java
|
mylibero/rv-monitor
|
6997a4b06e25fab1a994e32b2fef1a908bb12830
|
[
"MIT"
] | 5
|
2018-02-02T15:55:34.000Z
|
2020-12-01T00:08:32.000Z
|
plugins_logicrepository/cfg/src/test/java/com/runtimeverification/rvmonitor/logicrepository/plugins/cfg/DeepCopyTest.java
|
mylibero/rv-monitor
|
6997a4b06e25fab1a994e32b2fef1a908bb12830
|
[
"MIT"
] | 7
|
2017-10-11T19:30:31.000Z
|
2020-11-30T20:39:58.000Z
|
package com.runtimeverification.rvmonitor.logicrepository.plugins.cfg;
import com.runtimeverification.rvmonitor.logicrepository.plugins.cfg.util.DeepCopy;
import java.util.HashSet;
import java.util.ArrayList;
import java.util.Arrays;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Test code that produces deep copies of arbitrary data structures.
 */
public class DeepCopyTest {
    /**
     * Test that deep independent copies can be made from ArrayLists.
     */
    @Test
    public void testNestedArrayLists() {
        ArrayList<ArrayList<String>> arrs = new ArrayList<ArrayList<String>>();
        arrs.add(new ArrayList<String>());
        arrs.add(new ArrayList<String>(Arrays.asList("a")));
        arrs.add(new ArrayList<String>(Arrays.asList("a", "b")));
        ArrayList<ArrayList<String>> copy = DeepCopy.copy(arrs);
        assertEquals(arrs, copy);
        // Fix: compare every element — the original looped with i < 2 and
        // never checked the third inner list.
        for(int i = 0; i < arrs.size(); i++) {
            assertEquals(arrs.get(i), copy.get(i));
        }
        // Mutating the original must not leak into the copy (deepness check).
        arrs.get(0).add("c");
        assertFalse(arrs.equals(copy));
        assertFalse(arrs.get(0).equals(copy.get(0)));
    }
    /**
     * Test that deep independent copies can be made from HashSets.
     */
    @Test
    public void testNestedHashSets() {
        HashSet<HashSet<String>> sets = new HashSet<HashSet<String>>();
        sets.add(new HashSet<String>());
        sets.add(new HashSet<String>(Arrays.asList("a")));
        sets.add(new HashSet<String>(Arrays.asList("a", "b")));
        HashSet<HashSet<String>> copy = DeepCopy.copy(sets);
        assertEquals(sets, copy);
        assertTrue(copy.contains(new HashSet<String>()));
        assertTrue(copy.contains(new HashSet<String>(Arrays.asList("a"))));
        assertTrue(copy.contains(new HashSet<String>(Arrays.asList("a", "b"))));
        // Removing from the original must leave the copy untouched.
        sets.remove(new HashSet<String>());
        assertFalse(sets.equals(copy));
        assertFalse(sets.contains(new HashSet<String>()));
        assertTrue(copy.contains(new HashSet<String>()));
    }
}
| 34.711864
| 83
| 0.631348
|
4bb5cecbca7450d24d67d5d5791d2c31537f8338
| 451
|
h
|
C
|
Source/Public/consoleUtils.h
|
TK-Aria/apk-installer
|
fa7a8923ff46c82416867cec17781f51f9adfea7
|
[
"MIT"
] | 6
|
2019-03-27T02:08:20.000Z
|
2019-05-07T16:58:34.000Z
|
Source/Public/consoleUtils.h
|
TK-Aria/apk-installer
|
fa7a8923ff46c82416867cec17781f51f9adfea7
|
[
"MIT"
] | null | null | null |
Source/Public/consoleUtils.h
|
TK-Aria/apk-installer
|
fa7a8923ff46c82416867cec17781f51f9adfea7
|
[
"MIT"
] | null | null | null |
//
// @file consoleUtils.h
// @brief Command-line argument helpers.
// @details Extracts the application directory from argv[0].
//
#pragma once

// Raw process arguments as received by main().
struct CommnadParameter
{
    int argc;
    const char** argv;
};

// Returns argv[0] with the trailing `searchTarget` substring removed
// (e.g. strips the executable file name, leaving its directory).
// Fix: the original used find_last_of(), which matches the last
// occurrence of ANY single character drawn from searchTarget — not the
// substring itself; rfind() locates the substring. Also guards the
// not-found case instead of calling replace() with npos (undefined
// behaviour / out_of_range in the original).
// NOTE(review): assumes `String` is a std::string-like project alias
// (provides rfind/replace/npos) — confirm its definition.
String SearchPathFromCommandArg( const String& searchTarget, CommnadParameter& param )
{
    const int AppPathIndex = 0;
    String appPath = param.argv[AppPathIndex];

    auto pos = appPath.rfind(searchTarget.data());
    if (pos != String::npos)
    {
        appPath.replace(pos, searchTarget.length(), "");
    }
    return appPath;
}
| 16.107143
| 86
| 0.709534
|
a16a39f5ec6dd60a5fe45cbdd7c7df6e871fd21f
| 156
|
ts
|
TypeScript
|
src/components/button/index.ts
|
easynet-cn/easymesh-ui
|
a692b8123ea677a7a24b3e6e9b0da8c565e3b742
|
[
"Apache-2.0"
] | null | null | null |
src/components/button/index.ts
|
easynet-cn/easymesh-ui
|
a692b8123ea677a7a24b3e6e9b0da8c565e3b742
|
[
"Apache-2.0"
] | null | null | null |
src/components/button/index.ts
|
easynet-cn/easymesh-ui
|
a692b8123ea677a7a24b3e6e9b0da8c565e3b742
|
[
"Apache-2.0"
] | null | null | null |
import Button from './Button.vue'
import { App } from 'vue'

// Vue-plugin style install hook so the component can be registered
// globally via `app.use(Button)`.
// NOTE(review): relies on Button.name being defined by the SFC — confirm
// the component declares a `name` option.
Button.install = (Vue: App) => {
    Vue.component(Button.name, Button)
}

export default Button
| 17.333333
| 36
| 0.685897
|
ef5194fbd7ace4377816684a27f1fea637da8cf6
| 3,841
|
js
|
JavaScript
|
test/unit/auth/auth-retry-queue.service.test.js
|
subrosa/subrosa-web-client
|
5a781942e0b9b0ba0b27004241e17e695c46d819
|
[
"MIT"
] | null | null | null |
test/unit/auth/auth-retry-queue.service.test.js
|
subrosa/subrosa-web-client
|
5a781942e0b9b0ba0b27004241e17e695c46d819
|
[
"MIT"
] | null | null | null |
test/unit/auth/auth-retry-queue.service.test.js
|
subrosa/subrosa-web-client
|
5a781942e0b9b0ba0b27004241e17e695c46d819
|
[
"MIT"
] | null | null | null |
// Jasmine spec for the authRetryQueue service: a queue of failed HTTP
// requests that can be transformed, rejected, or retried after (re)auth.
describe('Service: authRetryQueue', function () {
    'use strict';
    var $http, deferred, authRetryQueue;

    beforeEach(module('subrosa.auth'));

    // Stub deferred with no-op resolve/reject so calls can be spied on.
    beforeEach(function () {
        deferred = {
            resolve: function () {},
            reject: function () {}
        };
    });

    // Fake $http: takes the success path when config.success is truthy,
    // otherwise routes through the error callback.
    beforeEach(module(function ($provide) {
        $http = function (config) {
            return {then: function (successCallback, errorCallback) {
                if (config.success) {
                    successCallback(config);
                } else {
                    errorCallback(config);
                }
            }};
        };
        $provide.value('$http', $http);
    }));

    beforeEach(inject(function (_authRetryQueue_) {
        authRetryQueue = _authRetryQueue_;
    }));

    it('can return the request queue', function () {
        var queue = authRetryQueue.getQueue();
        expect(queue.length).toBe(0);
    });

    it('can append HTTP requests', function () {
        var queue;
        authRetryQueue.append('config', deferred);
        queue = authRetryQueue.getQueue();
        expect(queue.length).toBe(1);
        expect(queue[0].config).toBe('config');
        expect(queue[0].deferred).toBe(deferred);
    });

    describe("can perform actions on an existing queue", function () {
        var config;

        // Seed three pending requests sharing the same config/deferred.
        beforeEach(function () {
            config = {success: true};
            authRetryQueue.append(config, deferred);
            authRetryQueue.append(config, deferred);
            authRetryQueue.append(config, deferred);
        });

        it('by transforming all requests', function () {
            authRetryQueue.transform(function () {
                return {config: 'newValue', deferred: deferred};
            });
            angular.forEach(authRetryQueue.getQueue(), function (item) {
                expect(item.config).toBe('newValue');
            });
        });

        it('by rejecting all requests with a reason', function () {
            spyOn(deferred, 'reject');
            authRetryQueue.rejectAll('invalid');
            expect(deferred.reject).toHaveBeenCalledWith('invalid');
            expect(deferred.reject.calls.count()).toBe(3);
            expect(authRetryQueue.getQueue().length).toBe(0);
        });

        it('by rejecting all requests without a reason', function () {
            spyOn(deferred, 'reject');
            authRetryQueue.rejectAll();
            expect(deferred.reject).not.toHaveBeenCalled();
            expect(authRetryQueue.getQueue().length).toBe(0);
        });

        it('by retrying all requests and updating them', function () {
            var newConfig = {success: true, blah: 'blah'},
                updater = function () { return newConfig; };
            spyOn(deferred, 'resolve');
            authRetryQueue.retryAll(updater);
            expect(deferred.resolve).toHaveBeenCalledWith(newConfig);
            expect(deferred.resolve.calls.count()).toBe(3);
            expect(authRetryQueue.getQueue().length).toBe(0);
        });

        it('by retrying all requests and not updating them', function () {
            spyOn(deferred, 'resolve');
            authRetryQueue.retryAll();
            expect(deferred.resolve).toHaveBeenCalledWith(config);
            expect(deferred.resolve.calls.count()).toBe(3);
            expect(authRetryQueue.getQueue().length).toBe(0);
        });
    });

    // Fix: test-name typo "retying" -> "retrying".
    // NOTE(review): this case sits outside the "existing queue" describe
    // although its name matches that group's style — confirm grouping.
    it('by retrying all requests and encountering an error', function () {
        var config = {success: false};
        authRetryQueue.append(config, deferred);
        spyOn(deferred, 'reject');
        authRetryQueue.retryAll();
        expect(deferred.reject).toHaveBeenCalledWith(config);
        expect(deferred.reject.calls.count()).toBe(1);
        expect(authRetryQueue.getQueue().length).toBe(0);
    });
});
| 34.294643
| 74
| 0.568602
|
d637ae6a064ff7fb298aab64d1ac92ba0550bb2f
| 1,032
|
cs
|
C#
|
01.Linear-Data-Structures/01.SumAndAverageOfSequence/Startup.cs
|
bozhidar-slavov/12.Data-Structures-and-Algorithms
|
0acc3611dfedfd4c2213d010459d5b46e0def80c
|
[
"MIT"
] | null | null | null |
01.Linear-Data-Structures/01.SumAndAverageOfSequence/Startup.cs
|
bozhidar-slavov/12.Data-Structures-and-Algorithms
|
0acc3611dfedfd4c2213d010459d5b46e0def80c
|
[
"MIT"
] | null | null | null |
01.Linear-Data-Structures/01.SumAndAverageOfSequence/Startup.cs
|
bozhidar-slavov/12.Data-Structures-and-Algorithms
|
0acc3611dfedfd4c2213d010459d5b46e0def80c
|
[
"MIT"
] | null | null | null |
namespace SumAndAverageOfSequence
{
    using System;
    using System.Collections.Generic;
    using System.Linq;

    /// <summary>
    /// Reads positive integers from the console (empty line ends input)
    /// and prints their sum and average.
    /// </summary>
    public class Startup
    {
        public static void Main()
        {
            Console.Write("Enter a positive integer: ");
            string input = Console.ReadLine();
            var list = new List<int>();
            int currentNumber;
            // Loop until the user submits an empty line.
            while (!string.IsNullOrEmpty(input))
            {
                if (int.TryParse(input, out currentNumber) && currentNumber > 0)
                {
                    list.Add(currentNumber);
                }
                else
                {
                    Console.WriteLine("Invalid integer!");
                }
                Console.Write("Enter a positive integer: ");
                input = Console.ReadLine();
            }
            // Fix: Enumerable.Average() throws InvalidOperationException on
            // an empty sequence, so guard the "no valid numbers" case.
            if (list.Count == 0)
            {
                Console.WriteLine("No numbers were entered.");
                return;
            }
            var sum = list.Sum();
            var average = list.Average();
            Console.WriteLine($"Sum -> {sum}");
            Console.WriteLine($"Average -> {average:F2}");
        }
    }
}
| 26.461538
| 80
| 0.473837
|
25588b1351b50db88b5532b79e0d5c8a1d112c97
| 282
|
cs
|
C#
|
src/MongoDB.Driver.Extensions/Paging/Requests/SimpleKeyRequest.cs
|
imperugo/MongoDB.Driver.Extensions
|
3d6dbd0943803e965c14e8872e1081e42b1fb595
|
[
"MIT"
] | 2
|
2020-04-01T16:17:43.000Z
|
2021-01-18T09:03:18.000Z
|
src/MongoDB.Driver.Extensions/Paging/Requests/SimpleKeyRequest.cs
|
imperugo/MongoDB.Driver.Extensions
|
3d6dbd0943803e965c14e8872e1081e42b1fb595
|
[
"MIT"
] | null | null | null |
src/MongoDB.Driver.Extensions/Paging/Requests/SimpleKeyRequest.cs
|
imperugo/MongoDB.Driver.Extensions
|
3d6dbd0943803e965c14e8872e1081e42b1fb595
|
[
"MIT"
] | 4
|
2018-11-27T11:35:34.000Z
|
2020-06-25T13:42:35.000Z
|
namespace MongoDB.Driver.Extensions.Paging.Requests
{
    /// <summary>
    /// Request DTO carrying a single document key of type <typeparamref name="T"/>.
    /// </summary>
    public class SimpleKeyRequest<T>
    {
        /// <summary>Parameterless constructor (e.g. for serializers/model binding).</summary>
        public SimpleKeyRequest()
        {
        }

        /// <summary>Creates a request for the given key.</summary>
        public SimpleKeyRequest(T id)
        {
            Id = id;
        }

        /// <summary>The document key; virtual so derived requests can override it.</summary>
        public virtual T Id { get; set; }
    }
}
| 17.625
| 52
| 0.521277
|
6df590020a3cacd7459b7f91ca111aa77f4ce4a4
| 516
|
dart
|
Dart
|
lib/app/main_page/features/online_shop_main_page/presentation/pages/main_page/main_page_view_model.dart
|
Marcusjnr/online_shop_app
|
e3aa2cb90ec9730782200f050ac89d463820548d
|
[
"Apache-2.0"
] | 1
|
2022-03-16T23:58:34.000Z
|
2022-03-16T23:58:34.000Z
|
lib/app/main_page/features/online_shop_main_page/presentation/pages/main_page/main_page_view_model.dart
|
Marcusjnr/online_shop_app
|
e3aa2cb90ec9730782200f050ac89d463820548d
|
[
"Apache-2.0"
] | null | null | null |
lib/app/main_page/features/online_shop_main_page/presentation/pages/main_page/main_page_view_model.dart
|
Marcusjnr/online_shop_app
|
e3aa2cb90ec9730782200f050ac89d463820548d
|
[
"Apache-2.0"
] | null | null | null |
import 'package:online_shop_app/app/base/base_view_model.dart';
import 'package:online_shop_app/app/main_page/features/online_shop_main_page/data/models/pastry_model.dart';
import 'package:online_shop_app/core/enums/pastry_type.dart';

import '../../bloc/get_local_mock_data/get_local_mock_data_cubit.dart';

/// View model for the shop's main page; delegates data loading to
/// [GetLocalMockDataCubit].
class MainPageViewModel extends BaseViewModel{
  // Cubit that loads the bundled mock data for the page.
  GetLocalMockDataCubit getLocalMockDataCubit;

  MainPageViewModel(this.getLocalMockDataCubit);

  // Kick off loading of the local mock data; call once after construction.
  init(){
    getLocalMockDataCubit.getLocalMockData();
  }
}
| 32.25
| 108
| 0.825581
|
cf3dfd91af002644d934cef5847a151f16f361d9
| 1,283
|
lua
|
Lua
|
Lua/Mysteries/TheMarsBug.lua
|
dawnmist/SurvivingMars
|
02a3dca12d10998c5fd0a077ca8939ef6b3d4dd3
|
[
"BSD-Source-Code"
] | 94
|
2018-04-04T20:43:27.000Z
|
2022-01-30T22:26:26.000Z
|
Lua/Mysteries/TheMarsBug.lua
|
dawnmist/SurvivingMars
|
02a3dca12d10998c5fd0a077ca8939ef6b3d4dd3
|
[
"BSD-Source-Code"
] | 4
|
2018-04-06T07:52:08.000Z
|
2018-04-27T04:27:19.000Z
|
Lua/Mysteries/TheMarsBug.lua
|
dawnmist/SurvivingMars
|
02a3dca12d10998c5fd0a077ca8939ef6b3d4dd3
|
[
"BSD-Source-Code"
] | 26
|
2018-04-05T01:56:10.000Z
|
2022-02-20T19:27:44.000Z
|
-- "Wildfire (Hard)" mystery scenario definition (Surviving Mars).
-- Pure data: declares the scenario metadata plus the display strings for
-- the mystery's custom "Wildfire Cure" resource and its depots.
DefineClass.TheMarsBug = {
  __parents = {"MysteryBase"},
  scenario_name = "Mystery 8",
  display_name = T(8067, "Wildfire (Hard)"),
  rollover_text = T(8068, '"The worst pandemic in modern history was the Spanish flu of 1918, which killed tens of millions of people. Today, with how interconnected the world is, it would spread faster."<newline><right>- Bill Gates'),
  -- NOTE(review): presumably a difficulty/score modifier and a menu sort
  -- position — confirm against MysteryBase.
  challenge_mod = 60,
  order_pos = 11,
  --mystery resource properties
  resource_display_name = T(8458, "Wildfire Cure"),
  resource_display_icon = "UI/Icons/Buildings/res_mystery_resource.tga",
  resource_tag_icon = "UI/Icons/res_mystery_resource.tga",
  resource_unit_amount = const.ResourceScale,
  resource_color = RGB(0, 255, 0),
  resource_entity = "ResourceMystery",
  resource_description = T(8459, "Medicine preventing the Wildfire infection from destroying its host and from spreading to other humans."),
  depot_display_name = T(8531, "Wildfire Cure Depot"),
  depot_display_name_pl = T(8532, "Wildfire Cure Depots"),
  depot_description = T(8533, "Stores the cure for the Wildfire infection."),
  mech_depot_display_name = T(8796, "Wildfire Cure Storage"),
  mech_depot_display_name_pl = T(8797, "Wildfire Cure Storages"),
  mech_depot_description = T(8533, "Stores the cure for the Wildfire infection."),
}
| 47.518519
| 234
| 0.758379
|
2fe401b5d59daf742b26e87d56c26284f5d7b47c
| 1,953
|
py
|
Python
|
eventsourcing/utils/cipher/aes.py
|
HanhThong/eventsourcing
|
99c560cca7c6e9d59855bcd82c371794e1a8bfb9
|
[
"BSD-3-Clause"
] | null | null | null |
eventsourcing/utils/cipher/aes.py
|
HanhThong/eventsourcing
|
99c560cca7c6e9d59855bcd82c371794e1a8bfb9
|
[
"BSD-3-Clause"
] | null | null | null |
eventsourcing/utils/cipher/aes.py
|
HanhThong/eventsourcing
|
99c560cca7c6e9d59855bcd82c371794e1a8bfb9
|
[
"BSD-3-Clause"
] | null | null | null |
import zlib
import binascii
from base64 import b64decode, b64encode
from Crypto.Cipher import AES
from eventsourcing.exceptions import DataIntegrityError
from eventsourcing.utils.random import random_bytes
class AESCipher(object):
    """
    Cipher strategy that uses Crypto library AES cipher in GCM mode.

    Ciphertext layout: 12-byte nonce || 16-byte GCM tag || encrypted payload.
    """

    def __init__(self, cipher_key: bytes):
        """
        Initialises AES cipher strategy with ``cipher_key``.

        :param cipher_key: 16, 24, or 32 random bytes
        """
        assert len(cipher_key) in [16, 24, 32]
        self.cipher_key = cipher_key

    def encrypt(self, plaintext: bytes) -> bytes:
        """Return ciphertext for given plaintext."""
        # Fresh 96-bit (12-byte) random nonce per message, AES-GCM mode.
        gcm = AES.new(self.cipher_key, AES.MODE_GCM, nonce=random_bytes(12))
        sealed, tag = gcm.encrypt_and_digest(plaintext)  # type: ignore
        # Prepend nonce and tag so decrypt() can recover them.
        return gcm.nonce + tag + sealed  # type: ignore

    def decrypt(self, ciphertext: bytes) -> bytes:
        """Return plaintext for given ciphertext."""
        # Slice the ciphertext back into nonce / tag / payload.
        nonce = ciphertext[:12]
        tag = ciphertext[12:28]
        sealed = ciphertext[28:]
        if len(nonce) != 12:
            raise DataIntegrityError("Cipher text is damaged: invalid nonce length")
        if len(tag) != 16:
            raise DataIntegrityError("Cipher text is damaged: invalid tag length")
        # Rebuild the cipher with the transmitted nonce, then verify the tag.
        gcm = AES.new(self.cipher_key, AES.MODE_GCM, nonce)
        try:
            return gcm.decrypt_and_verify(sealed, tag)  # type: ignore
        except ValueError as e:
            raise DataIntegrityError("Cipher text is damaged: {}".format(e))
| 30.515625
| 84
| 0.642601
|
112262b87fd122c1c02b1ca5f26c182825368b21
| 2,715
|
swift
|
Swift
|
Tests/JustSignalsTests/JustSignalsTests.swift
|
devandsev/JustSignals
|
b901c5e25e41dfc3d5559063e6b44c3ac2e06c8e
|
[
"MIT"
] | null | null | null |
Tests/JustSignalsTests/JustSignalsTests.swift
|
devandsev/JustSignals
|
b901c5e25e41dfc3d5559063e6b44c3ac2e06c8e
|
[
"MIT"
] | null | null | null |
Tests/JustSignalsTests/JustSignalsTests.swift
|
devandsev/JustSignals
|
b901c5e25e41dfc3d5559063e6b44c3ac2e06c8e
|
[
"MIT"
] | null | null | null |
//
// JustSignalsTests.swift
// JustSignals
//
// Created by Andrey Sevrikov on 23/02/2018.
// Copyright © 2018 Andrey Sevrikov. All rights reserved.
//

import Foundation
import XCTest
@testable import JustSignals

/// Unit tests for `Signal`: firing, payload delivery, subscriber
/// lifetime (auto-dispose on deallocation) and explicit unsubscription.
class JustSignalsTests: XCTestCase {

    // MARK: - Tests

    func testFire() {
        // given
        var fired = false
        let signal = Signal<Void>()
        signal.subscribe(with: self) {
            fired = true
        }
        // when
        signal.fire(())
        // then
        XCTAssertTrue(fired)
    }

    func testFireWithData() {
        // given
        var currentData = 0
        let signal = Signal<Int>()
        signal.subscribe(with: self) { data in
            currentData = data
        }
        // when
        signal.fire(4)
        // then
        XCTAssertEqual(currentData, 4)
    }

    func testMultipleFire() {
        // given
        var counter = 0
        let signal = Signal<Void>()
        signal.subscribe(with: self) {
            counter += 1
        }
        // when
        signal.fire(())
        signal.fire(())
        // then
        XCTAssertEqual(counter, 2)
    }

    // Removing the subscriber object must drop its subscription.
    func testDisposeOnSubscribe() {
        // given
        let signal = Signal<Void>()
        var subscribers = [Subscriber(), Subscriber()]
        // when
        signal.subscribe(with: subscribers[0]) {
        }
        subscribers.removeFirst()
        signal.subscribe(with: subscribers[0]) {
        }
        // then
        XCTAssertEqual(signal.subscriptionsCount, 1)
    }

    // A fire after the subscriber is deallocated must not invoke its closure.
    func testDisposeOnFire() {
        // given
        let signal = Signal<Void>()
        let counter = Counter()
        var subscribers = [Subscriber()]
        subscribers.first!.setup(with: signal, counter: counter)
        // when
        signal.fire(())
        subscribers.removeAll()
        signal.fire(())
        // then
        XCTAssertEqual(counter.i, 1)
    }

    func testUnsubscribe() {
        // given
        var fired = false
        let signal = Signal<Void>()
        signal.subscribe(with: self) {
            fired = true
        }
        // when
        signal.unsubscribe(self)
        signal.fire(())
        // then
        XCTAssertFalse(fired)
    }

    // MARK: - Helpers

    /// Mutable call counter shared between a test and a subscriber closure.
    class Counter {
        var i = 0
    }

    /// Throwaway subscriber whose deallocation exercises auto-dispose.
    class Subscriber {
        func setup<T>(with signal: Signal<T>, counter: Counter) {
            signal.subscribe(with: self) { data in
                counter.i += 1
            }
        }
    }
}
| 20.413534
| 65
| 0.474033
|
386a0d2519d79168e3aa7ad523e2f94eedaab06f
| 1,910
|
php
|
PHP
|
resources/views/admin/manufacturer/createManufacturer.blade.php
|
Robiulalam/laravelsmartshop
|
ba30233f80c09bdc279c5bafae2932871cbeda5c
|
[
"MIT"
] | null | null | null |
resources/views/admin/manufacturer/createManufacturer.blade.php
|
Robiulalam/laravelsmartshop
|
ba30233f80c09bdc279c5bafae2932871cbeda5c
|
[
"MIT"
] | null | null | null |
resources/views/admin/manufacturer/createManufacturer.blade.php
|
Robiulalam/laravelsmartshop
|
ba30233f80c09bdc279c5bafae2932871cbeda5c
|
[
"MIT"
] | null | null | null |
@extends('admin.master')

@section('content')
    {{-- Admin form: create a Manufacturer (name, description, publication status).
         Posts to manufacturer/save; validation errors shown inline per field. --}}
    <br/>
    <hr/>
    <h1 class="text-center text-success">Add Manufacturer Form</h1>
    <hr/>
    {{-- Flash message set by the controller after a save attempt. --}}
    <h2 class="text-center text-success">{{ Session::get('message') }}</h2>
    <hr/>
    <div class="row">
        <div class="col-sm-12">
            <div class="well">
                {!! Form::open(['url'=>'manufacturer/save', 'method'=> 'POST', 'class'=> 'form-horizontal']) !!}
                <div class="form-group">
                    <label for="manufacturer_name" class="control-label col-sm-3">Manufacturer Name :</label>
                    <div class="col-sm-9">
                        <input class="form-control" id="manufacturer_name" type="text" name="manufacturer_name">
                        <span class="text-danger">{{ $errors->has('manufacturer_name')? $errors->first('manufacturer_name'):'' }}</span>
                    </div>
                </div>
                <div class="form-group">
                    <label for="manufacturer_description" class="control-label col-sm-3">Manufacturer Description :</label>
                    <div class="col-sm-9">
                        <textarea class="form-control" name="manufacturer_description" id="manufacturer_description" ></textarea>
                        <span class="text-danger">{{ $errors->has('manufacturer_description')? $errors->first('manufacturer_description'):'' }}</span>
                    </div>
                </div>
                <div class="form-group">
                    <label for="publication_status" class="control-label col-sm-3">Publication Status :</label>
                    <div class="col-sm-9">
                        {{-- NOTE(review): the placeholder option has no value attribute,
                             so submitting it sends the label text — confirm the
                             controller validates publication_status. --}}
                        <select class="form-control" name="publication_status" id="publication_status">
                            <option>--Select Publication status--</option>
                            <option value="1">Published</option>
                            <option value="0">Unpublished</option>
                        </select>
                    </div>
                </div>
                <div class="form-group">
                    <div class="col-sm-9 col-sm-offset-3">
                        <input type="submit" name="btn" value="Save Manufacturer Info" class="btn btn-success btn-block">
                    </div>
                </div>
                {!! Form::close() !!}
            </div>
        </div>
    </div>
@endsection
| 30.31746
| 132
| 0.627225
|
0ae6a958add0cbe7ef2a1ac31ad7a523b3e74d09
| 2,428
|
cs
|
C#
|
Foundation/Server/Bit.Core/Contracts/IDependencyManager.cs
|
abidram/bit-framework
|
bf9af5c833afb5f6bb462c821e7510948f31918e
|
[
"MIT"
] | null | null | null |
Foundation/Server/Bit.Core/Contracts/IDependencyManager.cs
|
abidram/bit-framework
|
bf9af5c833afb5f6bb462c821e7510948f31918e
|
[
"MIT"
] | null | null | null |
Foundation/Server/Bit.Core/Contracts/IDependencyManager.cs
|
abidram/bit-framework
|
bf9af5c833afb5f6bb462c821e7510948f31918e
|
[
"MIT"
] | null | null | null |
using System.Collections.Generic;
using System.Reflection;
using System;

namespace Foundation.Core.Contracts
{
    /// <summary>Lifetime options for registered services.</summary>
    public enum DependencyLifeCycle
    {
        SingleInstance,
        InstancePerLifetimeScope
    }

    /// <summary>
    /// Resolution side of the container: name-aware Resolve / ResolveAll,
    /// plus Optional variants for contracts that may be unregistered.
    /// </summary>
    public interface IDependencyResolver : IServiceProvider, IDisposable
    {
        TContract Resolve<TContract>(string name = null);

        // NOTE(review): presumably returns null when unregistered (hence the
        // class constraint) — confirm against the implementation.
        TContract ResolveOptional<TContract>(string name = null)
            where TContract : class;

        IEnumerable<TContract> ResolveAll<TContract>(string name = null);

        object Resolve(TypeInfo contractType, string name = null);

        object ResolveOptional(TypeInfo contractType, string name = null);

        IEnumerable<object> ResolveAll(TypeInfo contractType, string name = null);

        bool IsRegistered<TContract>();

        bool IsRegistered(TypeInfo contractType);
    }

    /// <summary>
    /// Registration side of the container. Registration methods return the
    /// manager itself for fluent chaining.
    /// NOTE(review): "overwriteExciting" is presumably a typo for
    /// "overwriteExisting"; renaming would break named-argument callers,
    /// so the parameter name is left unchanged.
    /// </summary>
    public interface IDependencyManager : IDependencyResolver
    {
        IDependencyManager Init();

        IDependencyManager BuildContainer();

        IDependencyManager RegisterHubs(params Assembly[] assemblies);

        IDependencyManager RegisterApiControllers(params Assembly[] assemblies);

        bool IsInited();

        IDependencyManager Register<TContract, TService>(string name = null,
            DependencyLifeCycle lifeCycle = DependencyLifeCycle.InstancePerLifetimeScope, bool overwriteExciting = true)
            where TService : class, TContract;

        IDependencyManager Register(TypeInfo contractType, TypeInfo serviceType, string name = null,
            DependencyLifeCycle lifeCycle = DependencyLifeCycle.InstancePerLifetimeScope, bool overwriteExciting = true);

        IDependencyManager RegisterInstance<TContract>(TContract implementationInstance, bool overwriteExciting = true, string name = null)
            where TContract : class;

        IDependencyManager RegisterInstance(object obj, TypeInfo contractType, bool overwriteExciting = true, string name = null);

        IDependencyManager RegisterGeneric(TypeInfo contractType, TypeInfo serviceType, DependencyLifeCycle lifeCycle);

        IDependencyManager RegisterUsing<TContract>(Func<TContract> factory, string name = null,
            DependencyLifeCycle lifeCycle = DependencyLifeCycle.InstancePerLifetimeScope, bool overwriteExciting = true);

        IDependencyResolver CreateChildDependencyResolver(Action<IDependencyManager> childDependencyManager = null);
    }
}
| 37.9375
| 139
| 0.739703
|
0d08ba7f68c818d15219decb5a904ca9f4c9b2b7
| 109
|
rb
|
Ruby
|
src/app/models/article.rb
|
sourabhedake/rate-it
|
0016883a292f9f2f4a98e68ea4f75ff0c7099325
|
[
"MIT"
] | null | null | null |
src/app/models/article.rb
|
sourabhedake/rate-it
|
0016883a292f9f2f4a98e68ea4f75ff0c7099325
|
[
"MIT"
] | 4
|
2020-06-17T09:11:45.000Z
|
2021-09-28T03:51:05.000Z
|
src/app/models/article.rb
|
sourabhedake/rate-it
|
0016883a292f9f2f4a98e68ea4f75ff0c7099325
|
[
"MIT"
] | null | null | null |
# Mongoid document for an article: a numeric id plus a display name.
class Article
  include Mongoid::Document

  field :art_id, type: Integer
  field :art_name, type: String
end
| 18.166667
| 31
| 0.752294
|
b8a4aa08300cb194220a048212c03ef9320f9d3c
| 3,213
|
lua
|
Lua
|
core/languages.lua
|
simoncozens/sile
|
566eaccb19c0c6800f15f1cf43b273d17370f8ba
|
[
"MIT"
] | 630
|
2015-01-05T00:16:29.000Z
|
2019-09-11T03:53:45.000Z
|
core/languages.lua
|
simoncozens/sile
|
566eaccb19c0c6800f15f1cf43b273d17370f8ba
|
[
"MIT"
] | 586
|
2015-01-06T03:14:51.000Z
|
2019-09-16T10:32:44.000Z
|
core/languages.lua
|
simoncozens/sile
|
566eaccb19c0c6800f15f1cf43b273d17370f8ba
|
[
"MIT"
] | 101
|
2015-01-13T14:11:15.000Z
|
2019-08-22T12:26:20.000Z
|
local loadkit = require("loadkit")
local cldr = require("cldr")

-- Register a loader for Fluent (.ftl) files: require()-ing one reads it
-- whole and feeds its messages into SILE's Fluent instance.
loadkit.register("ftl", function (file)
   local contents = assert(file:read("*a"))
   file:close()
   return assert(SILE.fluent:add_messages(contents))
end)

SILE.languageSupport = {
   languages = {},
   -- Load language support and localization resources for `language`
   -- (defaults to the current document language; unknown locales fall
   -- back to "und").
   loadLanguage = function (language)
      language = language or SILE.settings:get("document.language")
      language = cldr.locales[language] and language or "und"
      -- NOTE(review): languages[language] is only populated on load
      -- failure below, so a successful load re-runs this body on every
      -- call (require() itself caches the module) — confirm intended.
      if SILE.languageSupport.languages[language] then return end
      if SILE.hyphenator.languages[language] then return end
      local langresource = string.format("languages.%s", language)
      local gotlang, lang = pcall(require, langresource)
      if not gotlang then
         if lang:match("not found") then lang = "no support for this language" end
         SU.warn("Error loading language " .. language .. ": " .. lang)
         SILE.languageSupport.languages[language] = {} -- Don't try again
      end
      local ftlresource = string.format("i18n.%s", language)
      SU.debug("fluent", "Loading FTL resource", ftlresource, "into locale", language)
      SILE.fluent:set_locale(language)
      local gotftl, ftl = pcall(require, ftlresource)
      if not gotftl then
         if ftl:match("not found") then ftl = "no localizations for this language" end
         SU.warn("Error loading localizations " .. language .. ": " .. ftl)
      end
      -- A language module may expose an init() hook; run it after loading.
      if type(lang) == "table" and lang.init then
         lang.init()
      end
   end
}

-- \language[main=...]: switch the document language — scoped to the
-- wrapped content when content is given, globally otherwise.
SILE.registerCommand("language", function (options, content)
   local main = SU.required(options, "main", "language setting")
   SILE.languageSupport.loadLanguage(main)
   if content[1] then
      SILE.settings:temporarily(function ()
         SILE.settings:set("document.language", main)
         SILE.process(content)
      end)
   else
      SILE.settings:set("document.language", main)
   end
end)

-- \fluent{key}: typeset the localized message for `key` in the current
-- (or explicitly given) locale, formatted with the command options.
SILE.registerCommand("fluent", function (options, content)
   local key = content[1]
   local locale = options.locale or SILE.settings:get("document.language")
   SU.debug("fluent", "Looking for", key, "in", locale)
   local entry
   if key then
      SILE.fluent:set_locale(locale)
      entry = SILE.fluent:get_message(key)
   else
      SU.warn("Fluent localization function called without passing a valid message id")
   end
   local message
   if entry then
      message = entry:format(options)
   else
      SU.warn(string.format("No localized message for %s found in locale %s", key, locale))
   end
   SILE.process({ message })
end)

-- \ftl[src=...] or \ftl{...}: load Fluent messages into a locale, either
-- from a file or from inline content.
SILE.registerCommand("ftl", function (options, content)
   local locale = options.locale or SILE.settings:get("document.language")
   SU.debug("fluent", "Loading message(s) into locale", locale)
   SILE.fluent:set_locale(locale)
   if options.src then
      SILE.fluent:load_file(options.src, locale)
   elseif SU.hasContent(content) then
      local input = content[1]
      SILE.fluent:add_messages(input, locale)
   end
end)

require("languages.unicode")

-- The following languages neither have hyphenation nor specific
-- language support at present. This code is here to suppress warnings.
SILE.hyphenator.languages.ar = { patterns = {} }
SILE.hyphenator.languages.bo = { patterns = {} }
SILE.hyphenator.languages.sd = { patterns = {} }
SILE.hyphenator.languages.ur = { patterns = {} }
| 35.307692
| 89
| 0.70526
|
c9bfabb7d7798394df6c3f66120287e679e16f46
| 1,721
|
ts
|
TypeScript
|
packages/dx-grid-core/src/utils/group-panel.ts
|
justincasey/devextreme-reactive
|
b026219116c1374eb548541f947a899f32bb7dd1
|
[
"Apache-2.0"
] | 1,955
|
2017-05-16T12:16:27.000Z
|
2022-03-27T17:29:41.000Z
|
packages/dx-grid-core/src/utils/group-panel.ts
|
MaximKudriavtsev/devextreme-reactive
|
2ccbc84239bfc00bff5de2172678682cc5328ad2
|
[
"Apache-2.0"
] | 1,812
|
2017-05-17T07:33:28.000Z
|
2022-03-24T00:11:06.000Z
|
packages/dx-grid-core/src/utils/group-panel.ts
|
MaximKudriavtsev/devextreme-reactive
|
2ccbc84239bfc00bff5de2172678682cc5328ad2
|
[
"Apache-2.0"
] | 395
|
2017-05-16T11:01:00.000Z
|
2022-03-15T05:47:50.000Z
|
import { getTargetColumnGeometries } from './column-geometries';
import { TargetColumnGeometry, GetGroupCellTargetIndexFn } from '../types';
import { PureComputed } from '@devexpress/dx-core';

// True when `y` falls within the geometry's vertical extent.
const isOnTheSameLine: PureComputed<[TargetColumnGeometry, number], boolean> = (geometry, y) => (
  y >= geometry.top && y <= geometry.bottom
);

// Strip a geometry down to its plain rectangle fields.
const rectToObject = ({
  top, right, bottom, left,
}: TargetColumnGeometry) => ({
  top, right, bottom, left,
});

// Extend each geometry rightwards to its successor's left edge when both
// share the same row (same `top`), so there are no dead gaps between
// adjacent drop targets.
const collapseGapsBetweenItems: PureComputed<[TargetColumnGeometry[]]> = geometries => (
  geometries.map((geometry, index) => {
    if (index !== geometries.length - 1 && geometry.top === geometries[index + 1].top) {
      return {
        ...geometry,
        right: geometries[index + 1].left,
      };
    }
    return geometry;
  }));

// Resolve the drop index for a group cell dragged to point (x, y).
// `sourceIndex === -1` means the dragged item is not currently in the panel.
export const getGroupCellTargetIndex: GetGroupCellTargetIndexFn = (
  geometries, sourceIndex, { x, y },
) => {
  if (geometries.length === 0) return 0;
  const targetGeometries = sourceIndex !== -1
    ? getTargetColumnGeometries(geometries, sourceIndex)
    : geometries.map(rectToObject);
  const targetIndex = collapseGapsBetweenItems(targetGeometries)
    .findIndex((geometry, index) => {
      const inVerticalBounds = isOnTheSameLine(geometry, y);
      const inHorizontalBounds = x >= geometry.left && x <= geometry.right;
      // Dropping left of the first item maps to index 0.
      const shouldGoFirst = index === 0 && x < geometry.left;
      // NOTE(review): this indexes the original `geometries` array, not the
      // collapsed targetGeometries — presumably safe because collapsing only
      // changes `right`, never the vertical extents; confirm.
      const shouldGoOnLineBreak = !inVerticalBounds
        && !!geometries[index - 1]
        && isOnTheSameLine(geometries[index - 1], y);
      return (inVerticalBounds && inHorizontalBounds)
        || shouldGoFirst
        || shouldGoOnLineBreak;
    });
  // No hit anywhere → append to the end.
  return targetIndex === -1 ? geometries.length : targetIndex;
};
| 33.745098
| 97
| 0.67054
|
c4cbe47d20eb60dbe65c1355487554b4fd602940
| 2,043
|
dart
|
Dart
|
chapter_5/step_2_navigator_one/lib/src/ui/story/story_web_view.dart
|
PacktPublishing/Flutter-UI-Projects-for-iOS-Android-Web
|
fde7b6b95332ad05114930c72ed8b74a785fffc5
|
[
"MIT"
] | 5
|
2021-12-27T08:41:00.000Z
|
2022-03-27T22:53:27.000Z
|
chapter_5/step_2_navigator_one/lib/src/ui/story/story_web_view.dart
|
PacktPublishing/Flutter-UI-Projects-for-iOS-Android-Web
|
fde7b6b95332ad05114930c72ed8b74a785fffc5
|
[
"MIT"
] | 3
|
2021-12-05T12:21:43.000Z
|
2022-01-17T17:34:18.000Z
|
chapter_5/step_2_navigator_one/lib/src/ui/story/story_web_view.dart
|
PacktPublishing/Flutter-UI-Projects-for-iOS-Android-Web
|
fde7b6b95332ad05114930c72ed8b74a785fffc5
|
[
"MIT"
] | 1
|
2021-12-27T08:41:01.000Z
|
2021-12-27T08:41:01.000Z
|
import 'package:flutter/foundation.dart';
import 'package:flutter/gestures.dart';
import 'package:flutter/material.dart';
import 'package:hacker_news/src/data/models/story.dart';
import 'package:hacker_news/src/ui/story/story_controller.dart';
import 'package:provider/provider.dart';
import 'package:webview_flutter/webview_flutter.dart';
class StoryWebView extends StatefulWidget {
const StoryWebView({
Key? key,
required this.storyId,
}) : super(key: key);
final int storyId;
static const routeName = '/story-source';
@override
State<StoryWebView> createState() => _StoryWebViewState();
}
class _StoryWebViewState extends State<StoryWebView> {
late final StoryController _storyController;
@override
void initState() {
super.initState();
_storyController = StoryController(context.read())
..getStoryById(widget.storyId);
}
@override
Widget build(BuildContext context) {
return ChangeNotifierProvider.value(
value: _storyController,
child: Scaffold(
appBar: AppBar(),
body: Selector<StoryController, Story?>(
selector: (context, controller) => controller.selectedStory,
builder: (context, story, child) {
if (story == null) {
return const Center(
child: CircularProgressIndicator(),
);
}
if (story.url == null) {
return const Center(
child: Text('This story does not have a link'),
);
}
return WebView(
javascriptMode: JavascriptMode.unrestricted,
initialUrl: story.url.toString(),
gestureRecognizers: <Factory<VerticalDragGestureRecognizer>>{
Factory<VerticalDragGestureRecognizer>(
() => VerticalDragGestureRecognizer(),
)
},
);
},
),
),
);
}
}
class StoryWebViewArguments {
StoryWebViewArguments(this.storyId);
final int storyId;
}
| 27.986301
| 75
| 0.623103
|
58147ea45a43868d286b9b2dacc05f2eb7c3d551
| 220
|
css
|
CSS
|
src/components/global/main/main.css
|
calumryan/culturebook-frontend
|
503442e9ff0d7279160c63d17c782c9cec05e57c
|
[
"MIT"
] | null | null | null |
src/components/global/main/main.css
|
calumryan/culturebook-frontend
|
503442e9ff0d7279160c63d17c782c9cec05e57c
|
[
"MIT"
] | null | null | null |
src/components/global/main/main.css
|
calumryan/culturebook-frontend
|
503442e9ff0d7279160c63d17c782c9cec05e57c
|
[
"MIT"
] | null | null | null |
.c-main {
background-color: $navigation-color--offset;
@media (--upto-medium-screen) {
margin-top: $banner-height--small;
}
@media (--from-medium-screen) {
margin-right: $navigation-width--large;
}
}
| 18.333333
| 46
| 0.636364
|
126dc99dd469dca6a99812f2625f49e22e4422c7
| 2,459
|
cs
|
C#
|
Game/Simulation/Village.cs
|
nshcat/asciikingdom
|
84dea7bfa8efce8396b36659b14b9ef9bccb4c67
|
[
"MIT"
] | null | null | null |
Game/Simulation/Village.cs
|
nshcat/asciikingdom
|
84dea7bfa8efce8396b36659b14b9ef9bccb4c67
|
[
"MIT"
] | null | null | null |
Game/Simulation/Village.cs
|
nshcat/asciikingdom
|
84dea7bfa8efce8396b36659b14b9ef9bccb4c67
|
[
"MIT"
] | null | null | null |
using System.Collections.Generic;
using Engine.Core;
using Engine.Graphics;
using Game.Serialization;
namespace Game.Simulation
{
/// <summary>
/// A village is a small settlement associated with a city which produces basic, raw resources and is
/// taxed by its associated city.
/// </summary>
public class Village : PopulatedSite
{
/// <summary>
/// The name of this village
/// </summary>
public override string Name { get; set; }
/// <summary>
/// Village names are not shown on the world map
/// </summary>
public override bool ShowName => false;
/// <summary>
/// The position of this village on the world map
/// </summary>
public override Position Position { get; set; }
/// <summary>
/// The city this village is associated with
/// </summary>
public City AssociatedCity { get; set; }
/// <summary>
/// List of village growth stages
/// </summary>
private static List<SiteGrowthStage> GrowthStages { get; } = new List<SiteGrowthStage>
{
new SiteGrowthStage(0, "Small Hamlet", new Tile(61, Color.FromHex("#CDD2D2"))),
new SiteGrowthStage(25, "Hamlet", new Tile(240, Color.FromHex("#CDD2D2"))),
new SiteGrowthStage(50, "Small Village", new Tile(145, Color.FromHex("#CDD2D2"))),
new SiteGrowthStage(100, "Village", new Tile(146, Color.FromHex("#CDD2D2"))),
};
/// <summary>
/// Create a bew village with given name, position, initial population and associated city
/// </summary>
public Village(string name, Position position, int initialPopulation, City associatedCity)
: base(GrowthStages, initialPopulation)
{
this.Name = name;
this.Position = position;
this.AssociatedCity = associatedCity;
}
public override void Update(int weeks)
{
// TODO
}
/// <summary>
/// Create a simulation view from this object
/// </summary>
public VillageView ToView()
{
return new VillageView
{
Id = this.Id,
Name = this.Name,
Population = this.Population,
Position = this.Position
};
}
}
}
| 32.786667
| 105
| 0.55063
|
58ce390ae387a3ae7b5b0f58d595c1977e86c07d
| 3,882
|
css
|
CSS
|
static/css/style.css
|
lamproslntz/cbir-deep-learning
|
34a43cbe28afd4af49612c6e8010587b54a0656a
|
[
"MIT"
] | 2
|
2021-11-24T19:59:32.000Z
|
2021-11-26T14:32:31.000Z
|
static/css/style.css
|
lamproslntz/cbir-deep-learning
|
34a43cbe28afd4af49612c6e8010587b54a0656a
|
[
"MIT"
] | null | null | null |
static/css/style.css
|
lamproslntz/cbir-deep-learning
|
34a43cbe28afd4af49612c6e8010587b54a0656a
|
[
"MIT"
] | 1
|
2022-01-21T15:31:14.000Z
|
2022-01-21T15:31:14.000Z
|
:root {
--white: #fff;
--black: #000;
--rose-taupe: rgb(141, 95, 91);
--rose-taupe-trans: rgba(141, 95, 91, 0.6);
--auburn-wave: #d8a194
}
/* General */
* {
margin: 0;
padding: 0;
}
::-webkit-scrollbar {
display: none;
}
body {
color: var(--black);
background: var(--white);
font-family: 'Ubuntu', sans-serif;
height: 100vh;
overflow: hidden;
}
.container {
width: 100%;
height: 100%;
overflow-y: scroll;
scroll-behavior: smooth;
scroll-snap-type: y mandatory;
}
/* Navigation Bar */
.navbar {
background: var(--rose-taupe-trans);
color: var(--white);
position: fixed;
top: 0;
z-index: 1;
display: flex;
justify-content: space-around;
align-items: center;
width: 100%;
height: 60px;
}
/* Section -- General */
section {
width: 100%;
height: 100vh;
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
text-align: center;
scroll-snap-align: start;
}
/* Section -- Search */
.search {
background: url(../img/img-background.jpg) no-repeat center center/cover;
}
form {
width: 80%;
height: 50%;
display: flex;
align-items: center;
justify-content: center;
}
.upload-btn {
opacity: 0;
width: 0.1px;
height: 0.1px;
position: absolute;
}
.search label {
/* padding: 10px; */
margin-left: 10px;
margin-right: 10px;
/* display: block; */
/* position: relative; */
display: flex;
align-items: center;
justify-content: center;
min-width: 20%;
min-height: 20%;
font-size: 15px;
background: var(--rose-taupe);
color: var(--white);
border: none;
border-radius: 5px;
outline: none;
cursor: pointer;
transition: 0.3s;
}
.search label:hover {
background: var(--auburn-wave);
color: var(--white);
}
.search-btn {
padding: 10px;
margin-left: 10px;
margin-right: 10px;
min-width: 20%;
min-height: 20%;
font-size: 15px;
background: var(--rose-taupe);
color: var(--white);
border: none;
border-radius: 5px;
outline: none;
transition: 0.3s;
}
button:hover{
background: var(--auburn-wave);
color: var(--white);
}
/* Section -- Results */
.results {
height: fit-content;
text-align: start;
}
.result {
width: 40%;
margin: 20px;
padding: 20px;
display: flex;
flex-direction: column;
justify-content: center;
text-align: center;
line-height: 2.5;
word-wrap: break-word;
box-shadow: 0 5px 10px 0px rgba(0,0,0,0.2);
transition: 0.3s;
}
.image {
display: flex;
justify-content: center;
text-align: center;
}
.image img {
width: 60%;
}
.result:hover{
box-shadow: 0 5px 10px 0px rgba(0,0,0,0.5);
}
/* Large devices */
@media screen and (max-width: 992px) {
/* Section -- Results */
.image img {
width: 80%;
}
}
/* Medium devices */
@media screen and (max-width: 768px) {
/* Navigation Bar */
.creator {
display: none;
}
/* Section -- Search */
.search label {
min-width: 30%;
}
.search-btn {
min-width: 30%;
}
/* Section -- Results */
.result {
width: 80%;
}
.image img {
width: 80%;
}
}
/* Extra small devices */
@media only screen and (max-width: 600px) {
/* Navigation Bar */
.creator {
display: none;
}
/* Section -- Search */
form {
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
}
.search label {
margin: 10px;
min-width: 50%;
}
.search-btn {
margin: 10px;
min-width: 50%;
}
/* Section -- Results */
.result {
width: 80%;
}
.image img {
width: 80%;
}
}
| 17.026316
| 77
| 0.549717
|
c68a486d7e57618b6a75d692580cd9766a6cce63
| 7,495
|
css
|
CSS
|
public_html/style.css
|
juliocnsouzadev/SunAndEarth
|
f803aff03ffd58639007be2b9f83e838e2d48c11
|
[
"MIT"
] | null | null | null |
public_html/style.css
|
juliocnsouzadev/SunAndEarth
|
f803aff03ffd58639007be2b9f83e838e2d48c11
|
[
"MIT"
] | null | null | null |
public_html/style.css
|
juliocnsouzadev/SunAndEarth
|
f803aff03ffd58639007be2b9f83e838e2d48c11
|
[
"MIT"
] | null | null | null |
html, body {
/* The universe takes up all available space */
width: 100%;
height: 100%;
/* The universe is black */
background-color: black;
}
#sun {
position: absolute;
/* Positions the top-left corner of the image to be *
/* in the middle of the box */
top: 50%;
left: 50%;
border-color: orange;
border-width: 2px;
border-style: solid;
border-radius: 50%;
box-shadow: 0 0 256px orange;
/* Play with these numbers to see what it does */
height: 200px;
width: 200px;
margin-top: -100px;
margin-left: -100px;
}
#mercurio {
/* Style your earth */
position: absolute;
top: 0;
left: 50%;
height: 16px;
width: 16px;
margin-left: -25px;
margin-top: -25px;
}
#venus {
/* Style your earth */
position: absolute;
top: 0;
left: 50%;
height: 36px;
width: 36px;
margin-left: -25px;
margin-top: -25px;
}
#earth {
/* Style your earth */
position: absolute;
top: 0;
left: 50%;
height: 30px;
width: 30px;
margin-left: -25px;
margin-top: -25px;
}
#mars {
/* Style your earth */
position: absolute;
top: 0;
left: 50%;
height: 20px;
width: 20px;
margin-left: -25px;
margin-top: -25px;
}
#jupter {
/* Style your earth */
position: absolute;
top: 0;
left: 50%;
height: 60px;
width: 55px;
margin-left: -25px;
margin-top: -25px;
}
#saturno {
/* Style your earth */
position: absolute;
top: 0;
left: 50%;
height: 35px;
width: 50px;
margin-left: -25px;
margin-top: -25px;
}
#uranus {
/* Style your earth */
position: absolute;
top: 0;
left: 50%;
height: 25px;
width: 25px;
margin-left: -25px;
margin-top: -25px;
}
#netuno {
/* Style your earth */
position: absolute;
top: 0;
left: 50%;
height: 22px;
width: 22px;
margin-left: -25px;
margin-top: -25px;
}
#plutao {
/* Style your earth */
position: absolute;
top: 0;
left: 50%;
height: 10px;
width: 10px;
margin-left: -25px;
margin-top: -25px;
}
#mercurio-orbit {
/* For Section #2 */
position: absolute;
top: 50%;
left: 50%;
width: 300px;
height: 300px;
margin-top: -150px;
margin-left: -150px;
border-width: 0px;
border-style: dotted;
border-color: white;
border-radius: 50%;
-webkit-animation: spin-right 11s linear infinite;
-moz-animation: spin-right 11s linear infinite;
-ms-animation: spin-right 11s linear infinite;
-o-animation: spin-right 11s linear infinite;
animation: spin-right 11s linear infinite;
}
#venus-orbit {
/* For Section #2 */
position: absolute;
top: 50%;
left: 50%;
width: 400px;
height: 400px;
margin-top: -200px;
margin-left: -200px;
border-width: 0px;
border-style: dotted;
border-color: white;
border-radius: 50%;
-webkit-animation: spin-right 8s linear infinite;
-moz-animation: spin-right 8s linear infinite;
-ms-animation: spin-right 8s linear infinite;
-o-animation: spin-right 8s linear infinite;
animation: spin-right 8s linear infinite;
}
#earth-orbit {
/* For Section #2 */
position: absolute;
top: 50%;
left: 50%;
width: 500px;
height: 500px;
margin-top: -250px;
margin-left: -250px;
border-width: 0px;
border-style: dotted;
border-color: white;
border-radius: 50%;
-webkit-animation: spin-right 10s linear infinite;
-moz-animation: spin-right 10s linear infinite;
-ms-animation: spin-right 10s linear infinite;
-o-animation: spin-right 10s linear infinite;
animation: spin-right 10s linear infinite;
}
#mars-orbit {
/* For Section #2 */
position: absolute;
top: 50%;
left: 50%;
width: 600px;
height: 600px;
margin-top: -300px;
margin-left: -300px;
border-width: 0px;
border-style: dotted;
border-color: white;
border-radius: 50%;
-webkit-animation: spin-right 15s linear infinite;
-moz-animation: spin-right 15s linear infinite;
-ms-animation: spin-right 15s linear infinite;
-o-animation: spin-right 15s linear infinite;
animation: spin-right 15s linear infinite;
}
#jupter-orbit {
/* For Section #2 */
position: absolute;
top: 50%;
left: 50%;
width: 700px;
height: 700px;
margin-top: -350px;
margin-left: -350px;
border-width: 0px;
border-style: dotted;
border-color: white;
border-radius: 50%;
-webkit-animation: spin-right 13s linear infinite;
-moz-animation: spin-right 13s linear infinite;
-ms-animation: spin-right 13s linear infinite;
-o-animation: spin-right 13s linear infinite;
animation: spin-right 13s linear infinite;
}
#saturno-orbit {
/* For Section #2 */
position: absolute;
top: 50%;
left: 50%;
width: 800px;
height: 800px;
margin-top: -400px;
margin-left: -400px;
border-width: 0px;
border-style: dotted;
border-color: white;
border-radius: 50%;
-webkit-animation: spin-right 10s linear infinite;
-moz-animation: spin-right 10s linear infinite;
-ms-animation: spin-right 10s linear infinite;
-o-animation: spin-right 10s linear infinite;
animation: spin-right 10s linear infinite;
}
#uranus-orbit {
/* For Section #2 */
position: absolute;
top: 50%;
left: 50%;
width: 900px;
height: 900px;
margin-top: -450px;
margin-left: -450px;
border-width: 0px;
border-style: dotted;
border-color: white;
border-radius: 50%;
-webkit-animation: spin-right 14s linear infinite;
-moz-animation: spin-right 14s linear infinite;
-ms-animation: spin-right 14s linear infinite;
-o-animation: spin-right 14s linear infinite;
animation: spin-right 14s linear infinite;
}
#netuno-orbit {
/* For Section #2 */
position: absolute;
top: 50%;
left: 50%;
width: 1000px;
height: 1000px;
margin-top: -500px;
margin-left: -500px;
border-width: 0px;
border-style: dotted;
border-color: white;
border-radius: 50%;
-webkit-animation: spin-right 12s linear infinite;
-moz-animation: spin-right 12s linear infinite;
-ms-animation: spin-right 12s linear infinite;
-o-animation: spin-right 12s linear infinite;
animation: spin-right 12s linear infinite;
}
#plutao-orbit {
/* For Section #2 */
position: absolute;
top: 50%;
left: 50%;
width: 1100px;
height: 1100px;
margin-top: -550px;
margin-left: -550px;
border-width: 0px;
border-style: dotted;
border-color: white;
border-radius: 50%;
-webkit-animation: spin-right 10s linear infinite;
-moz-animation: spin-right 10s linear infinite;
-ms-animation: spin-right 10s linear infinite;
-o-animation: spin-right 10s linear infinite;
animation: spin-right 10s linear infinite;
}
@-webkit-keyframes spin-right {
100% {
-webkit-transform: rotate(360deg);
-moz-transform: rotate(360deg);
-ms-transform: rotate(360deg);
-o-transform: rotate(360deg);
transform: rotate(360deg);
}
}
@keyframes spin-right {
100% {
-webkit-transform: rotate(360deg);
-moz-transform: rotate(360deg);
-ms-transform: rotate(360deg);
-o-transform: rotate(360deg);
transform: rotate(360deg);
}
}
| 20.534247
| 57
| 0.611608
|
ab770b0612c04c28b1f996fb8ea5e460700f516d
| 985
|
sql
|
SQL
|
src/main/resources/resources/cohortanalysis/heraclesanalyses/sql/600_601.sql
|
patrickfischer1/WebAPI
|
759334d4161267b1f728a624590b566bb64abaec
|
[
"Apache-2.0"
] | 99
|
2015-01-06T18:24:25.000Z
|
2022-03-10T16:34:04.000Z
|
src/main/resources/resources/cohortanalysis/heraclesanalyses/sql/600_601.sql
|
patrickfischer1/WebAPI
|
759334d4161267b1f728a624590b566bb64abaec
|
[
"Apache-2.0"
] | 1,261
|
2015-01-01T17:33:35.000Z
|
2022-03-28T18:16:27.000Z
|
src/main/resources/resources/cohortanalysis/heraclesanalyses/sql/600_601.sql
|
patrickfischer1/WebAPI
|
759334d4161267b1f728a624590b566bb64abaec
|
[
"Apache-2.0"
] | 159
|
2015-01-12T13:39:42.000Z
|
2022-03-15T13:39:31.000Z
|
-- @analysisId @analysisName
--insert into @results_schema.heracles_results (cohort_definition_id, analysis_id, stratum_1, count_value)
select c1.cohort_definition_id,
@analysisId as analysis_id,
po1.procedure_CONCEPT_ID as stratum_1,
cast( '' as varchar(1) ) as stratum_2, cast( '' as varchar(1) ) as stratum_3, cast( '' as varchar(1) ) as stratum_4,
COUNT_BIG(distinct po1.@fieldName) as count_value
into #results_@analysisId
from
@CDM_schema.procedure_occurrence po1
inner join #HERACLES_cohort c1
on po1.person_id = c1.subject_id
--{@procedure_concept_ids != '' | @cohort_period_only == 'true'}?{
WHERE
--{@cohort_period_only == 'true'}?{
po1.procedure_date>=c1.cohort_start_date and po1.procedure_date<=c1.cohort_end_date
--}
--{@procedure_concept_ids != '' & @cohort_period_only == 'true'}?{
AND
--}
--{@procedure_concept_ids != ''}?{
po1.procedure_concept_id in (@procedure_concept_ids)
--}
--}
group by c1.cohort_definition_id,
po1.procedure_CONCEPT_ID
;
| 36.481481
| 118
| 0.745178
|
0a83308032a8190b1358e31968bea22966e45ecf
| 546
|
rs
|
Rust
|
examples/views/zstack.rs
|
robbert-vdh/vizia
|
a84c7258072f22fd2ad92f91b92a7cebbed2b118
|
[
"MIT"
] | 53
|
2021-10-30T14:36:21.000Z
|
2022-02-16T17:14:12.000Z
|
examples/views/zstack.rs
|
robbert-vdh/vizia
|
a84c7258072f22fd2ad92f91b92a7cebbed2b118
|
[
"MIT"
] | 35
|
2022-02-22T22:14:40.000Z
|
2022-03-31T17:09:42.000Z
|
examples/views/zstack.rs
|
robbert-vdh/vizia
|
a84c7258072f22fd2ad92f91b92a7cebbed2b118
|
[
"MIT"
] | 8
|
2021-12-18T17:29:02.000Z
|
2022-02-10T00:39:43.000Z
|
use vizia::prelude::*;
const COLORS: [Color; 3] = [Color::red(), Color::green(), Color::blue()];
fn main() {
Application::new(|cx| {
ZStack::new(cx, |cx| {
for i in 0..3 {
Element::new(cx)
.size(Pixels(100.0))
.top(Pixels(10.0 * i as f32))
.left(Pixels(10.0 * i as f32))
.background_color(COLORS[i]);
}
})
.left(Pixels(10.0))
.top(Pixels(10.0));
})
.title("ZStack")
.run();
}
| 24.818182
| 73
| 0.415751
|
8de629bbb6524f78415a4748037cb2c0ce3e4ad0
| 5,276
|
js
|
JavaScript
|
App UI/app/screens/register/register.js
|
ApurvaBiswas5899/WIT-ACE-Call-for-code-Team-Eternals
|
edd7a9e093df3423c452fa6403d33c1be9f6b3de
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
App UI/app/screens/register/register.js
|
ApurvaBiswas5899/WIT-ACE-Call-for-code-Team-Eternals
|
edd7a9e093df3423c452fa6403d33c1be9f6b3de
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
App UI/app/screens/register/register.js
|
ApurvaBiswas5899/WIT-ACE-Call-for-code-Team-Eternals
|
edd7a9e093df3423c452fa6403d33c1be9f6b3de
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
import React from 'react';
import {
Text,
View,
Image,
TextInput,
SafeAreaView,
Pressable,
} from 'react-native';
import {ScrollView} from 'react-native-gesture-handler';
import {COLORS} from '../../constants';
import viewStyle from '../../screenStyle';
import ProcessButton from '../../components/proceed_button';
import {isEmail} from '../../utils/helpers';
export default class Register extends React.PureComponent {
constructor(props) {
super(props);
this.email = '';
this.password = '';
this.name = '';
this.lastname = '';
this.image = '';
this.confirmPassword = '';
this.state = {
isloading: false,
error: '',
};
}
onRegister = async () => {
const {actions, navigation} = this.props;
if (this.name.length === 0) {
this.setState({
error: 'Please enter your first name',
});
return;
}
if (this.lastname.length === 0) {
this.setState({
error: 'Please enter your first lastname',
});
return;
}
if (!isEmail(this.email)) {
this.setState({
error: 'Please enter valid email',
});
return;
}
if (this.password.length < 8) {
this.setState({
error: 'Please enter a better password',
});
return;
}
if (this.password !== this.confirmPassword) {
this.setState({
error: "Passwords dosen't match",
});
return;
}
this.setState({
isloading: true,
});
const result = await actions.registerUser();
if (!result?.success) {
this.setState({
isloading: false,
error: result.message,
});
}
if (result?.success) {
navigation.navigate('Verification', {email: this.email});
}
};
render() {
const {error, isloading} = this.state;
const button = (
<Pressable
style={[viewStyle.pressableIOS, styles.button]}
android_ripple={{color: 'white'}}
onPress={this.onRegister}>
<Text style={styles.loginOptions}>Signup</Text>
</Pressable>
);
return (
<SafeAreaView style={{flex: 1, backgroundColor: '#FFF'}}>
<ScrollView style={{flex: 1}}>
<View style={{flex: 1, paddingBottom: 20}}>
<Image
source={require('../../assets/images/login.png')}
resizeMode="contain"
style={{height: 250, width: 400}}
/>
<Text style={viewStyle.headingStyle}>burpp it!!</Text>
{error ? (
<Text style={viewStyle.errorTextStyle}>{error}</Text>
) : null}
<View style={viewStyle.inputStyle}>
<TextInput
placeholder="First Name"
placeholderTextColor="#ea4e50"
onChangeText={text => {
this.name = text;
}}
style={styles.input}
/>
</View>
<View style={viewStyle.inputStyle}>
<TextInput
placeholder="Last Name"
placeholderTextColor="#ea4e50"
onChangeText={text => {
this.lastname = text;
}}
style={styles.input}
/>
</View>
<View style={viewStyle.inputStyle}>
<TextInput
placeholder="Email"
placeholderTextColor="#ea4e50"
onChangeText={text => {
this.email = text;
}}
style={styles.input}
/>
</View>
<View style={viewStyle.inputStyle}>
<TextInput
secureTextEntry
placeholder="Password"
placeholderTextColor="#ea4e50"
onChangeText={text => {
this.password = text;
}}
style={styles.input}
/>
</View>
<View style={viewStyle.inputStyle}>
<TextInput
secureTextEntry
placeholder="Confirm Password"
placeholderTextColor="#ea4e50"
onChangeText={text => {
this.confirmPassword = text;
}}
style={styles.input}
/>
</View>
<ProcessButton observe={isloading} button={button} />
</View>
</ScrollView>
</SafeAreaView>
);
}
}
const styles = {
errorText: {
color: '#000',
},
centerContainer: {
// fontFamily: 'SemiBold',
marginHorizontal: 55,
textAlign: 'center',
marginTop: 30,
marginBottom: 10,
opacity: 0.4,
},
input: {paddingHorizontal: 10, flex: 1},
button: {
marginHorizontal: 55,
alignItems: 'center',
justifyContent: 'center',
marginTop: 30,
backgroundColor: '#de2c47',
paddingVertical: 10,
borderRadius: 23,
},
loginOptions: {
color: 'white',
// fontFamily: 'SemiBold',
},
register: {
marginHorizontal: 55,
alignItems: 'center',
justifyContent: 'center',
marginTop: 30,
borderColor: COLORS.primary,
borderWidth: 2,
backgroundColor: 'white',
paddingVertical: 10,
borderRadius: 23,
},
};
| 26.248756
| 67
| 0.511751
|
f1afee5b6a70f71c3a03b2e738bd5003ba1795f8
| 19,242
|
lua
|
Lua
|
Contents/mods/FishingPlus/media/lua/client/FishingPlus.lua
|
MassCraxx/FishingPlus
|
4ad32897e0ccd0200c54f2af0645bc9ad19ecd01
|
[
"MIT"
] | null | null | null |
Contents/mods/FishingPlus/media/lua/client/FishingPlus.lua
|
MassCraxx/FishingPlus
|
4ad32897e0ccd0200c54f2af0645bc9ad19ecd01
|
[
"MIT"
] | null | null | null |
Contents/mods/FishingPlus/media/lua/client/FishingPlus.lua
|
MassCraxx/FishingPlus
|
4ad32897e0ccd0200c54f2af0645bc9ad19ecd01
|
[
"MIT"
] | null | null | null |
--***********************************************************
--** Fishing+ v1 **
--** MassCraxx + ROBERT JOHNSON **
--***********************************************************
--[[
[h1]Vanilla Fishing Rework[/h1]
[h3]Features:[/h3]
> [b]Adds features of [url=https://steamcommunity.com/sharedfiles/filedetails/?id=2696281225]Bricks Wants Fish[/url] (Big thanks to bricks for letting me include his idea)[/b]
[list]
[*] Fish size chance now scales off fishing level
[*] High fishing levels have a better chance of catching bigger fish
[*] Small fish become nonexistent past Fishing level 5
[*] Added new fish size: Prize - 50% larger than Big fish
[*] Adjusted fish size chance distribution
[/list]
> [b]Reworked XP gain to be more rewarding[/b]
[list]
[*] Increase XP gain in general (adjustable via Sandbox options)
[*] Gained XP scales with the actual size of caught fish
[*] Catching bigger fish gains significantly more XP than catching smaller ones
[*] Show gained XP just as in Foraging
[/list]
> [b]Reworked trash loot generation[/b]
[list]
[*] Catching trash gains fixed amount of XP
[*] Trash loot is taken from a weighted loot table containing many different items
[*] Chance of slightly more useful trash items with higher fishing level
[/list]
[h3]Sandbox options:[/h3]
[list]
[*] Choose from 3 different XP gain options - High, Medium, Low (Vanilla-near)
[*] Free XP multiplier for fine balancing
[*] Fish nutrition factor to adjust the nutrition value of caught fish
[*] En- / Disable reworked trash loot table
[*] Set if fish abundance should decrease every catch or only if catching fish
[/list]
Can be en- and disabled on existing savegames and is fully multiplayer compatible.
On dedicated servers, Sandbox options can be adjusted any time (may require restart).
[hr][/hr]
Workshop ID: 2757633688
Mod ID: FishingPlus
]]
FishingPlus = _G['FishingPlus'] or {}
OG_ISFishingAction_perform = ISFishingAction.perform
function ISFishingAction:perform()
local decreaseAbundance = SandboxVars.FishingPlus.DecreaseAbundance or 1;
if decreaseAbundance == 1 then
OG_ISFishingAction_perform(self);
return;
end
self.rod:setJobDelta(0.0);
self.character:PlayAnim("Idle");
if self.usingSpear then
self.splashTimer = 0;
end
-- get the fishing zone to see how many fishes left
local updateZone = self:getFishingZone();
if updateZone then
local fishLeft = tonumber(updateZone:getName());
if getGametimeTimestamp() - updateZone:getLastActionTimestamp() > 20000 then
fishLeft = math.max(ZombRand(10,25) + self.fishingZoneIncrease, 0);
updateZone:setName(tostring(fishLeft));
updateZone:setOriginalName(tostring(fishLeft));
end
if fishLeft == 0 then
self.character:SetVariable("FishingFinished","true");
-- needed to remove from queue / start next.
ISBaseTimedAction.perform(self);
return;
end
end
local caughtFish = false;
if self:attractFish() then -- caught something !
local fish = self:getFish();
if updateZone and fish.name then -- only update if caught item is a fish
local fishLeft = tonumber(updateZone:getName());
updateZone:setName(tostring(fishLeft - 1));
updateZone:setLastActionTimestamp(getGametimeTimestamp());
if isClient() then updateZone:sendToServer() end
end
caughtFish = true;
else
if ZombRand(9) == 0 then -- give some xp even for a fail
self.character:getXp():AddXP(Perks.Fishing, 1);
end
if self.lureProperties and ZombRand(100) <= self.lureProperties.chanceOfBreak then -- maybe remove the lure
self.character:getSecondaryHandItem():Use();
self.character:setSecondaryHandItem(nil);
end
end
if not updateZone then -- register a new fishing zone
local nbrOfFish = math.max(ZombRand(10,25) + self.fishingZoneIncrease, 0);
local x,y,z = self.tile:getSquare():getX(), self.tile:getSquare():getY(), self.tile:getSquare():getZ()
local updateZone = getWorld():registerZone(tostring(nbrOfFish), "Fishing", x - 20, y - 20, z, 40, 40);
updateZone:setOriginalName(tostring(nbrOfFish));
updateZone:setLastActionTimestamp(getGametimeTimestamp());
if isClient() then updateZone:sendToServer() end
end
if self.fishingUI then
self.fishingUI:updateZoneProgress(updateZone);
end
local newAction = nil;
if not self.usingSpear then
local lure = ISWorldObjectContextMenu.getFishingLure(self.character, self.rod)
if lure then
ISWorldObjectContextMenu.equip(self.character, self.character:getSecondaryHandItem(), lure:getType(), false);
newAction = ISFishingAction:new(self.character, self.tile, self.rod, lure, self.fishingUI);
end
else
newAction = ISFishingAction:new(self.character, self.tile, self.rod, nil, self.fishingUI);
end
if newAction then
ISTimedActionQueue.add(newAction);
end
if not self.usingSpear then
if newAction then
if caughtFish then
newAction.stage = "reel";
-- print(" - TRIGGER: strike (newcast & caughtfish)")
else
newAction.stage = "cast";
-- print(" - TRIGGER: cast (newcast & nocaught)")
end
else
if caughtFish then
self.character:SetVariable("FishingStage","strikeEnd");
-- print(" - TRIGGER: strikeEnd (nonewcast & caughtfish)")
else
self.character:SetVariable("FishingFinished","true");
-- print(" - TRIGGER: FishingFinished = true (nonewcast & nocaught)")
end
end
else
if newAction then
if caughtFish then
newAction.stage = "spearStrike";
-- print(" - TRIGGER: strike (newcast & caughtfish)")
else
newAction.stage = "spearIdle";
-- print(" - TRIGGER: cast (newcast & nocaught)")
end
else
if caughtFish then
self.character:SetVariable("FishingStage","spearStrike");
-- print(" - TRIGGER: strikeEnd (nonewcast & caughtfish)")
else
self.character:SetVariable("FishingFinished","true");
-- print(" - TRIGGER: FishingFinished = true (nonewcast & nocaught)")
end
end
end
-- needed to remove from queue / start next.
ISBaseTimedAction.perform(self);
end
-- get a fish by the number
-- if plastic lure : 15/100 it's a big, 25/100 medium and 60/100 it's a little/lure fish
-- if living lure : 20/100 it's a big, 30/100 it's a medium and 50/100 it's a little/lure fish
function ISFishingAction:getFish()
local fishItem = nil;
local minRoll = 100-(8*self.fishingLvl);
local fishSizeNumber = ZombRand(minRoll * 100) / 100;
local fishSizeThreshold = {};
local fish = {};
-- we gonna determine the fish size and give player's xp
-- first, if we have a plastic lure
if self.plasticLure then
if fishSizeNumber <= 1 then --vanilla 3
fish.size = "Prize";
fishSizeThreshold = {1,0};
--self.character:getXp():AddXP(Perks.Fishing, 10);
elseif fishSizeNumber <= 15 then
fish.size = "Big";
fishSizeThreshold = {15,2};
--self.character:getXp():AddXP(Perks.Fishing, 7); -- 13 - 15
elseif fishSizeNumber <= 55 then
fish.size = "Medium";
fishSizeThreshold = {55,16};
--self.character:getXp():AddXP(Perks.Fishing, 5); -- 7-8
else
fish.size = "Small";
fishSizeThreshold = {100,56};
--self.character:getXp():AddXP(Perks.Fishing, 3); -- 5-6
end
else -- living lure size
if fishSizeNumber <= 2 then --vanilla 5
fish.size = "Prize";
fishSizeThreshold = {2,0};
--self.character:getXp():AddXP(Perks.Fishing, 10);
elseif fishSizeNumber <= 20 then
fish.size = "Big";
fishSizeThreshold = {20,3};
--self.character:getXp():AddXP(Perks.Fishing, 7);
elseif fishSizeNumber <= 65 then
fish.size = "Medium";
fishSizeThreshold = {65,21};
--self.character:getXp():AddXP(Perks.Fishing, 5);
else
fish.size = "Small";
fishSizeThreshold = {100,66};
--self.character:getXp():AddXP(Perks.Fishing, 3);
end
end
local gainedXP = 1;
fish.fish = self:getFishByLure();
if fish.fish.name then -- if no name then it's a "trash" item
-- then we may broke our line
if not self:brokeLine(fish) then
-- we gonna create our fish
fishItem, gainedXP = self:createFish(fish, fish.fish, fishSizeNumber, fishSizeThreshold);
-- getSoundManager():PlayWorldSound("getFish", false, self.character:getSquare(), 1, 20, 1, false)
self.character:playSound("CatchFish");
addSound(self.character, self.character:getX(), self.character:getY(), self.character:getZ(), 20, 1)
end
else
fishItem = InventoryItemFactory.CreateItem(fish.fish.item);
if not fishItem then
print("Item "..fish.fish.item.." from TrashLoot could not be created.")
HaloTextHelper.addText(self.character, "Hmm, nothing. Some modder may have screwed up...")
return {}
end
if fishItem:getCondition() and fishItem:getCondition() > 0 then
fishItem:setCondition(ZombRand(1,fishItem:getConditionMax()/2));
end
local inv = self:getUsedInventory(fishItem);
inv:AddItem(fishItem);
if not self.usingSpear then
-- getSoundManager():PlayWorldSound("getFish", false, self.character:getSquare(), 1, 20, 1, false)
self.character:playSound("CatchTrashWithRod");
addSound(self.character, self.character:getX(), self.character:getY(), self.character:getZ(), 20, 1)
end
if fish.fish.xp then
gainedXP = fish.fish.xp;
else
gainedXP = FishingPlus:getXpFromRoll(100); -- should be worst possible catch xp (TEST: 100 vs minRoll)
end
end
-- gain XP
print("Fishing caught: "..(fish.fish.name or fish.fish.item).." | fishSizeNumber: "..fishSizeNumber.." | gainedXP: "..gainedXP)
local currentXP = self.character:getXp():getXP(Perks.Fishing);
self.character:getXp():AddXP(Perks.Fishing, gainedXP);
gainedXP = self.character:getXp():getXP(Perks.Fishing) - currentXP;
gainedXP = string.format("%.2f", gainedXP);
local holotext = "[col=137,232,148]"..Perks.Fishing:getName().." "..getText("Challenge_Challenge2_CurrentXp", gainedXP) .. "[/] [img=media/ui/ArrowUp.png]"
HaloTextHelper.addText(self.character, holotext)
-- remove the lure
if not self.plasticLure and self.character:getSecondaryHandItem() then
self.character:getSecondaryHandItem():Use();
self.character:setSecondaryHandItem(nil);
end
if self.fishingUI then
self.fishingUI:setFish(fishItem);
end
return fish;
end
-- Decide what the player hooked: trash or a fish valid for the equipped lure.
-- The trash chance starts at MaxTrashRate and decays towards MinTrashRate as
-- the player's fishing level rises (one damping step per level).
-- @return a trash entry or a fish definition table
function ISFishingAction:getFishByLure()
    local item = 0;
    local MaxTrashRate = 0.4;
    local MinTrashRate = 0.15;
    local DampingConstant = 0.3;
    local trashRate = MaxTrashRate;
    for i = 0, self.fishingLvl do
        -- fix: trashDelta leaked as a global in the original (missing `local`)
        local trashDelta = trashRate - MinTrashRate
        trashRate = trashRate - (trashDelta * DampingConstant)
    end
    if ZombRandFloat(0.0, 1.0) < trashRate then
        -- sandbox option: 2 = uniform pick from the flat trash list,
        -- anything else = level-gated weighted table
        local trashItemConfig = SandboxVars.FishingPlus.TrashItemConfig or 1;
        if trashItemConfig == 2 then
            item = Fishing.trashItems[ZombRand(#Fishing.trashItems) + 1];
        else
            item = FishingPlus:getTrashItem(self.fishingLvl);
        end
    else
        item = Fishing.fishes[ZombRand(#Fishing.fishes) + 1];
        -- a spear catches anything; a rod only catches fish accepting the lure
        for i, v in ipairs(item.lure) do
            if (self.lure and v == self.lure:getType()) or self.usingSpear then
                return item;
            end
        end
        -- re-roll until a fish matches the current lure
        -- NOTE(review): unbounded recursion — if no fish accepts this lure,
        -- this never terminates; confirm every lure maps to at least one fish.
        return self:getFishByLure();
    end
    return item;
end
-- Create the fish item we just caught.
-- Rolls its size (for the name) and weight (for hunger) from its size class
-- ("Small" / "Medium" / "Big"; any other value is the oversized class that can
-- roll an Ancient title), scales nutrition to the rolled weight, scales the
-- world model, then renames it like "Big Trout - 26cm".
-- @param fishType table carrying .size (the size class string)
-- @param fish fish definition (item name, per-class size tables, name, ...)
-- @param fishSizeNumber the roll that drives both size and XP
-- @param fishSizeThreshold {best, worst} roll bounds passed to getSizeFromRoll
-- @return the created inventory item and the XP gained for it
function ISFishingAction:createFish(fishType, fish, fishSizeNumber, fishSizeThreshold)
    local fishToCreate = InventoryItemFactory.CreateItem(fish.item);
    local baseWeightLb = fishToCreate:getActualWeight();
    local size = nil;
    local maxSize = nil;
    local minSize = nil;
    local weightKg = nil;
    local baseScale = 1;
    local ancient = false;
    -- pick size (drives the name) and weight (drives hunger) per size class
    if fishType.size == "Small" then
        maxSize = fish.little.maxSize;
        minSize = fish.little.minSize;
        size = FishingPlus:getSizeFromRoll(minSize, maxSize, fishSizeNumber, fishSizeThreshold);
        weightKg = size / fish.little.weightChange;
    elseif fishType.size == "Medium" then
        maxSize = fish.medium.maxSize;
        minSize = fish.medium.minSize;
        size = FishingPlus:getSizeFromRoll(minSize, maxSize, fishSizeNumber, fishSizeThreshold);
        weightKg = size / fish.medium.weightChange;
        baseScale = 1.2;
    elseif fishType.size == "Big" then
        maxSize = fish.big.maxSize;
        minSize = fish.big.minSize;
        size = FishingPlus:getSizeFromRoll(minSize, maxSize, fishSizeNumber, fishSizeThreshold);
        weightKg = size / fish.big.weightChange;
        baseScale = 1.4;
    else
        -- oversized class: 1.5x the Big size band, eligible for Ancient title
        maxSize = fish.big.maxSize * 1.5;
        minSize = fish.big.minSize * 1.5;
        size = FishingPlus:getSizeFromRoll(minSize, maxSize, fishSizeNumber, fishSizeThreshold);
        weightKg = size / fish.big.weightChange;
        baseScale = 1.7;
        -- Ancient only when the roll lands within 1cm of the band's ceiling
        ancient = size >= (maxSize - 1);
        print("Ancient Roll "..size.." / "..maxSize);
    end
    -- extra world-scale (0..~0.5) by where the size fell inside its band
    local scaleMod = (((size - minSize) + 1) / ((maxSize - minSize) + 1) / 2);
    local nutritionConfigMulti = tonumber(SandboxVars.FishingPlus.FishNutritionFactor) or 2.2;
    -- nutrition scales with rolled weight relative to the item's base weight
    local nutritionFactor = nutritionConfigMulti * weightKg / baseWeightLb;
    print("Create Fish ", fishType.size, size, minSize, maxSize, " % ", scaleMod, "full scale: ", (baseScale + scaleMod), " nutritionFactor: ", nutritionFactor, " (", nutritionConfigMulti, ")");
    fishToCreate:setCalories(fishToCreate:getCalories() * nutritionFactor);
    fishToCreate:setLipids(fishToCreate:getLipids() * nutritionFactor);
    fishToCreate:setCarbohydrates(fishToCreate:getCarbohydrates() * nutritionFactor);
    fishToCreate:setProteins(fishToCreate:getProteins() * nutritionFactor);
    fishToCreate:setWorldScale(scaleMod + baseScale);
    -- the fish name is like: Big Trout - 26cm
    if not fish.noNameChange then
        local prefix = fishType.size
        if ancient then
            local prefixes = {"Holy", "Ancient", "The One", "Legendary"};
            -- fix: ZombRand(a, b) rolls a..b-1, so the original
            -- ZombRand(1, #prefixes) could never pick the last prefix;
            -- ZombRand(#prefixes) + 1 covers 1..#prefixes (same pattern the
            -- file uses for Fishing.fishes / Fishing.trashItems)
            prefix = prefixes[ZombRand(#prefixes) + 1]
        end
        -- fix: original passed the number `size` as string.format's format
        -- string; tostring yields the same text without the misuse
        fishToCreate:setName(prefix .. " " .. fish.name .. " - " .. tostring(size) .. "cm");
    end
    -- hunger reduction is the fish's weight in kg divided by 6, negated
    fishToCreate:setBaseHunger(- weightKg / 6);
    fishToCreate:setHungChange(fishToCreate:getBaseHunger());
    -- inventory weight is kg * 2.2 (pounds)
    fishToCreate:setActualWeight(weightKg * 2.2);
    fishToCreate:setCustomWeight(true)
    local inv = self:getUsedInventory(fishToCreate);
    inv:AddItem(fishToCreate);
    local xp = FishingPlus:getXpFromRoll(fishSizeNumber);
    return fishToCreate, xp;
end
--------------------- TRASH LOOT GEN -----------------------
-- Cached trash loot table, rebuilt per fishing level by FishingPlus:getTrashLoot
-- (it stores the level it was built for in LootTable.level).
-- NOTE(review): deliberately global (no `local`) — presumably shared module
-- state; confirm nothing else in the mod collides with this name.
LootTable = {}
-- Convert a catch roll (0-100) into fishing XP.
-- The sandbox XP setting doubles the roll-dependent term and raises the flat
-- bonus per step; the sandbox multiplier scales the whole thing.
-- NOTE(review): the original inline comment listed "+12" for the highest
-- setting, but the code computes 6^(setting-1) = 36 there — confirm which is
-- intended before touching the formula.
-- @param x the roll (0-100)
-- @return XP, truncated to two decimal places
function FishingPlus:getXpFromRoll(x)
    local setting = SandboxVars.FishingPlus.XpSetting or 2;
    local multi = tonumber(SandboxVars.FishingPlus.XpMultiplier) or 1.0;
    local rollTerm = 220 * (2 ^ (setting - 1)) / (x + 10);
    local flatBonus = 6 ^ (setting - 1);
    local xp = (rollTerm + flatBonus) * multi;
    -- truncate (not round) to two decimals
    return math.floor(xp * 100) / 100;
end
-- Map a roll onto a fish size within [minSize, maxSize].
-- @param minSize smallest size (cm) of the class' band
-- @param maxSize largest size (cm) of the class' band
-- @param x the roll achieved
-- @param max {best, worst} pair bounding the possible rolls
-- @return the size, rounded to the nearest whole number
-- NOTE(review): divides by (max[1] - max[2]); if those thresholds are ever
-- equal this produces inf/nan — confirm callers guarantee max[1] ~= max[2].
function FishingPlus:getSizeFromRoll(minSize, maxSize, x, max)
    -- minimum fish size plus a share of the band proportional to how close
    -- the roll is to the best threshold
    local percentage = math.abs(x - max[1]) / (max[1]-max[2]);
    -- fix: "treshold" typo in the debug message
    print("Roll x "..x.." with threshold "..max[1].."-"..max[2].." results in "..(percentage * 100).."% of size.")
    local result = minSize + ((maxSize - minSize) * percentage);
    return math.floor(result + 0.5) -- round to nearest number
end
-- Weighted random pick from the level-filtered trash loot table.
-- @param fishingLvl the player's fishing level (gates which entries qualify)
-- @return the chosen entry table (carries .item, .weight, ...)
function FishingPlus:getTrashItem(fishingLvl)
    local lootTable = FishingPlus:getTrashLoot(fishingLvl);
    local totalWeight = lootTable.weight
    -- fix: ZombRand(a, b) rolls a..b-1, so the original ZombRand(1, totalWeight)
    -- could never land on the last entry's final weight unit;
    -- ZombRand(totalWeight) + 1 covers 1..totalWeight inclusive
    local randomNumber = ZombRand(totalWeight) + 1;
    local weightIndex = 0;
    -- iterate with pairs plus a type guard: the cached table also carries the
    -- numeric bookkeeping fields .weight/.level, and a level-filtered copy may
    -- contain array holes that would stop ipairs before reaching every entry
    for _, entry in pairs(lootTable) do
        if type(entry) == "table" and entry.item then
            weightIndex = weightIndex + entry.weight;
            if randomNumber <= weightIndex then
                return entry;
            end
        end
    end
end
-- Build (and cache in the module-level LootTable) the trash loot table for the
-- given fishing level. An entry from FishingPlus.TrashItems qualifies when it
-- has no level requirement or one at/below fishingLvl. The cache is rebuilt
-- whenever the requested level differs from the cached one.
-- @param fishingLvl the player's fishing level
-- @return an array of qualifying entries plus two bookkeeping fields:
--         .weight (sum of entry weights) and .level (level it was built for)
function FishingPlus:getTrashLoot(fishingLvl)
    -- Generate/Filter Loot-Table
    if not LootTable.level or LootTable.level ~= fishingLvl then
        LootTable = {};
        local count, weight = 0, 0;
        for _, v in pairs(FishingPlus.TrashItems) do
            if not v.level or v.level <= fishingLvl then
                count = count + 1;
                weight = weight + v.weight;
                -- fix: insert densely at `count` — the original kept the
                -- source index (LootTable[k] = v), which left array holes
                -- after filtering and made ipairs-based consumers stop early
                LootTable[count] = v;
            end
        end
        LootTable.weight = weight;
        LootTable.level = fishingLvl;
        print("Generated new LootTable for level "..fishingLvl.." with "..count.." items.")
    end
    return LootTable;
end
-- Debug helper: dump each trash entry's drop chance for the given fishing
-- level, sorted by weight (highest first), and print the summed percentage as
-- a sanity check (should be ~100).
-- @param fishingLvl the fishing level whose loot table to inspect
function FishingPlus:printLootTablePercentages(fishingLvl)
    local loot = FishingPlus:getTrashLoot(fishingLvl)
    -- sort the array part descending by weight; guards keep the comparator
    -- safe if an entry is malformed
    table.sort(loot, function(a, b)
        return (a and b and a.weight and b.weight and a.weight > b.weight)
    end)
    local total = 0
    for key, entry in pairs(loot) do
        if entry and entry.item then
            local pct = (entry.weight / loot.weight) * 100
            total = total + pct
            print(string.format("%.2f", pct).."%".." - "..entry.item);
        else
            -- bookkeeping fields (.weight/.level) and anything unexpected
            print(key.." - "..tostring(entry));
        end
    end
    print("Total percentage: "..total);
end
| 40.766949
| 194
| 0.631379
|
fe8254500cc297409375f8d82dec1011937511ee
| 665
|
lua
|
Lua
|
nvim/lua/default_theme/init.lua
|
DanCarzano/dotfiles-awesomewm
|
0742d5386648e21ea8ad9220e170859749c83964
|
[
"MIT"
] | 1
|
2022-03-31T14:27:14.000Z
|
2022-03-31T14:27:14.000Z
|
nvim/lua/default_theme/init.lua
|
DanCarzano/dotfiles-awesomewm
|
0742d5386648e21ea8ad9220e170859749c83964
|
[
"MIT"
] | null | null | null |
nvim/lua/default_theme/init.lua
|
DanCarzano/dotfiles-awesomewm
|
0742d5386648e21ea8ad9220e170859749c83964
|
[
"MIT"
] | null | null | null |
-- Neovim colorscheme entry point for "default_theme".
-- Clear any previously applied highlight groups and reset syntax state so this
-- theme starts from a clean slate.
vim.cmd "hi clear"
if vim.fn.exists "syntax_on" then
  vim.cmd "syntax reset"
end
vim.o.background = "dark"
vim.o.termguicolors = true
vim.g.colors_name = "default_theme"
local user_plugin_opts = require("core.utils").user_plugin_opts
local util = require "default_theme.util"
-- Highlight definitions are split across these submodules; later modules win
-- on conflicting group names via the "force" merge below.
local modules = {
  "base",
  "treesitter",
  "lsp",
  "others",
}
local highlights = {}
-- NOTE(review): C is assigned globally (no `local`) — presumably the
-- default_theme submodules read it for the color palette; confirm before
-- making it local.
C = require "default_theme.colors"
-- Merge every submodule's highlight table into one map of group -> attrs.
for _, module in ipairs(modules) do
  highlights = vim.tbl_deep_extend("force", highlights, require("default_theme." .. module))
end
-- Let user config override the merged table, then apply each group.
for group, colors in pairs(user_plugin_opts("default_theme.highlights", highlights)) do
  util.highlight(group, colors)
end
| 21.451613
| 92
| 0.732331
|