patch stringlengths 18 160k | callgraph stringlengths 4 179k | summary stringlengths 4 947 | msg stringlengths 6 3.42k |
|---|---|---|---|
@@ -18,7 +18,7 @@
# OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ==================================================================================================
-import os
+import os,sys
import json
import time
import json_tricks
| [get_next_parameter->[,sleep,AssertionError,isfile,getsize,format,join,request_next_parameter,open,load],send_metric->[,write,print,flush,len,run],request_next_parameter->[send_metric,dumps],get,join,exists,open,makedirs,init_logger] | This function returns a base64 encoded version of a unique identifier. Reads the nexus metric metrics from the Nexus network and writes them to the. | import sys the next line |
@@ -11,9 +11,11 @@ class RClipr(RPackage):
and X11 clipboards."""
homepage = "https://github.com/mdlincoln/clipr"
- url = "https://cran.r-project.org/src/contrib/clipr_0.4.0.tar.gz"
- list_url = "https://cran.r-project.org/src/contrib/Archive/clipr"
+ url = "https://cloud.r-project.org/src/contrib/clipr_0.4.0.tar.gz"
+ list_url = "https://cloud.r-project.org/src/contrib/Archive/clipr"
+ version('0.7.0', sha256='03a4e4b72ec63bd08b53fe62673ffc19a004cc846957a335be2b30d046b8c2e2')
+ version('0.5.0', sha256='fd303f8b7f29badcdf490bb2d579acdfc4f4e1aa9c90ac77ab9d05ce3d053dbf')
version('0.4.0', '4012a31eb3b7a36bd3bac00f916e56a7')
depends_on('r-rstudioapi', type=('build', 'run'))
| [RClipr->[depends_on,version]] | Simple utility functions to read from and write to the Windows OS X and Clipboard. | `r-rstudioapi` and `r-testthat` are suggested dependencies, not real dependencies. They should be removed. |
@@ -564,7 +564,7 @@ static float run_z_probe() {
}
#endif
- return current_position[Z_AXIS] + zprobe_zoffset;
+ return current_position[Z_AXIS];
}
/**
| [No CFG could be retrieved] | Runs a single Z probe. Move the probe down to the next z position and then move up to make sure that the. | Will removing `+ zprobe_zoffset` work for Cartesian machines? |
@@ -270,7 +270,8 @@ void KeepKeyPromises(const char *public_key_file, const char *private_key_file)
Log(LOG_LEVEL_VERBOSE, "Writing private key to '%s'", private_key_file);
- if (!PEM_write_RSAPrivateKey(fp, pair, cipher, passphrase, strlen(passphrase), NULL, NULL))
+ if (!PEM_write_RSAPrivateKey(fp, pair, cipher, (void *)passphrase,
+ strlen(passphrase), NULL, NULL))
{
err = ERR_get_error();
Log(LOG_LEVEL_ERR, "Couldn't write private key. (PEM_write_RSAPrivateKey: %s)", ERR_reason_error_string(err));
| [No CFG could be retrieved] | Creates a new key pair and writes it to the private key file. Private function that writes the public key to a file. | Again with the const-ness :-( |
@@ -56,7 +56,7 @@ import java.util.stream.Collectors;
public class NullDereferenceCheck extends SECheck {
private static final ExceptionalYieldChecker EXCEPTIONAL_YIELD_CHECKER = new ExceptionalYieldChecker(
- "NullPointerException will be thrown when invoking method %s().");
+ "\"NullPointerException\" will be thrown when invoking method \"%s()\".");
private static final String JAVA_LANG_NPE = "java.lang.NullPointerException";
| [NullDereferenceCheck->[reportIssue->[reportIssue],checkPostStatement->[NullDereferenceIssue],reportIssues->[reportIssue],checkConstraint->[NullDereferenceIssue]]] | Imports the given SECheck object. Construct a new object that can be used to determine if a node is a symbolic value. | I think `OptionalGetBeforeIsPresentCheck` message ought to be updated too, if we want to have quotes around exception and method. |
@@ -676,6 +676,6 @@ func (db *DB) Commit(deleteEmptyObjects bool) (root common.Hash, err error) {
}
return nil
})
- log.Debug("Trie cache stats after commit", "misses", trie.CacheMisses(), "unloads", trie.CacheUnloads())
+ //log.Debug("Trie cache stats after commit", "misses", trie.CacheMisses(), "unloads", trie.CacheUnloads())
return root, err
}
| [getStateObject->[setError,Error],Commit->[Commit,clearJournalAndRefund,deleteStateObject,updateStateObject],SetNonce->[SetNonce],SetBalance->[SetBalance],createObject->[getStateObject,setStateObject],SetCode->[SetCode],AddBalance->[AddBalance],SetState->[SetState],updateStateObject->[setError],Finalise->[updateStateObject,deleteStateObject],GetState->[GetState],deleteStateObject->[setError],IntermediateRoot->[Finalise],SubBalance->[SubBalance],GetCodeSize->[setError],ForEachStorage->[getStateObject],GetCommittedState->[GetCommittedState],GetOrNewStateObject->[getStateObject],CreateAccount->[createObject]] | Commit commits all changes made to the state trie and returns the root hash of the root object This function is called after commit to get the root node of the trie. | ? should we just remove |
@@ -271,6 +271,9 @@ class Query(graphene.ObjectType):
class Mutations(graphene.ObjectType):
+ authorization_key_add = AuthorizationKeyAdd.Field()
+ authotization_key_delete = AuthorizationKeyDelete.Field()
+
assign_navigation = AssignNavigation.Field()
token_create = CreateToken.Field()
| [Query->[resolve_customers->[resolve_customers],resolve_sales->[resolve_sales],resolve_page->[resolve_page],resolve_orders->[resolve_orders],resolve_products->[resolve_products],resolve_menu->[resolve_menu],resolve_vouchers->[resolve_vouchers],resolve_staff_users->[resolve_staff_users],resolve_categories->[resolve_categories],resolve_pages->[resolve_pages],resolve_attributes->[resolve_attributes],resolve_collections->[resolve_collections],resolve_product_types->[resolve_product_types],resolve_product_variants->[resolve_product_variants],resolve_menus->[resolve_menus],resolve_address_validator->[resolve_address_validator],resolve_order->[resolve_order],resolve_menu_items->[resolve_menu_items],resolve_shipping_zones->[resolve_shipping_zones]]] | Resolve all related objects to a single object. Get the fields of the N - ary object. | typo `authotization` -> `authorization` |
@@ -92,9 +92,7 @@ class LockfileMetadata:
)
-def calculate_invalidation_digest(
- requirements: FrozenOrderedSet[str],
-) -> str:
+def calculate_invalidation_digest(requirements: Iterable[str]) -> str:
"""Returns an invalidation digest for the given requirements."""
m = hashlib.sha256()
inputs = {
| [read_lockfile_metadata->[from_json_bytes,yield_metadata_lines],LockfileMetadata->[from_json_bytes->[LockfileMetadata,coerce]],lockfile_content_with_header->[LockfileMetadata],lockfile_metadata_header->[to_json_literal]] | Returns an invalidation digest for the given requirements. | `Iterable` is too general a type here, because the digest function requires a predictable iteration order; `Iterable` doesn't guarantee iteration order. Is there a more general type than `FrozenOrderedSet` that suits the requirements here? Alternatively, we can change the digest function to sort the requirements first and regenerate all the lockfiles and associated tests. |
@@ -112,3 +112,13 @@ class ReplaceCartLineForm(AddToCartForm):
product_variant = self.get_variant(self.cleaned_data)
return self.cart.add(product_variant, self.cleaned_data['quantity'],
replace=True)
+
+
+class CountryForm(forms.Form):
+
+ country = forms.ChoiceField(choices=COUNTRY_CODE_CHOICES, label='',
+ required=False)
+
+ def get_shipment_options(self):
+ code = self.cleaned_data['country']
+ return get_shipment_options(code)
| [ReplaceCartLineForm->[save->[get_variant]],AddToCartForm->[QuantityField]] | Replaces the selected product s quantity in the cart. | Shouldn't that be a CountryField? |
@@ -48,11 +48,11 @@ public class FileBackedGamePropertiesCache implements IGamePropertiesCache {
cache.getParentFile().mkdirs();
}
try (OutputStream os = new FileOutputStream(cache);
- ObjectOutputStream out = new ObjectOutputStream(os)) {
+ final ObjectOutputStream out = new ObjectOutputStream(os)) {
out.writeObject(serializableMap);
}
} catch (final IOException e) {
- ClientLogger.logQuietly("Failed to write game properties to cache: " + cache.getAbsolutePath(), e);
+ log.log(Level.SEVERE, "Failed to write game properties to cache: " + cache.getAbsolutePath(), e);
}
}
| [FileBackedGamePropertiesCache->[cacheGameProperties->[getValue,getName,writeObject,FileOutputStream,logQuietly,mkdirs,getCacheFile,exists,getEditableProperties,getAbsolutePath,put,ObjectOutputStream],loadCachedGamePropertiesInto->[FileInputStream,getName,ObjectInputStream,get,logQuietly,readObject,setValue,getCacheFile,exists,getEditableProperties,getAbsolutePath],getFileName->[charAt,append,toString,StringBuilder,binarySearch,length],getCacheFile->[getUserRootFolder,File,getGameName,getFileName]]] | Load the cached properties into the game data. | `final` not required. |
@@ -682,7 +682,7 @@ class Channel(object):
"""
timeout = expiration - self.block_number
- if not self.isopen:
+ if not self.can_transfer:
raise ValueError('The channel is closed.')
# the lock timeout cannot be larger than the settle timeout (otherwise
| [ChannelExternalState->[close->[close],withdraw->[withdraw],settle->[settle],update_transfer->[update_transfer]],Channel->[channel_closed->[withdraw,update_transfer],register_transfer_from_to->[distributable],state_transition->[set_settled,settle,set_closed],balance->[balance],release_lock->[release_lock],isopen->[isopen],__init__->[callback_on_settled,callback_on_closed,callback_on_opened],register_secret->[register_secret],distributable->[distributable],withdraw_lock->[release_lock],create_lockedtransfer->[distributable],create_mediatedtransfer->[create_lockedtransfer],locked->[locked],create_directtransfer->[distributable],outstanding->[locked]]] | Create a LockedTransfer message. | Please, fix the error message |
@@ -1,13 +1,9 @@
<% title "#{title} #{community_qualified_name}" %>
-<div class="blank-space"></div>
-<div class="container article">
- <div class="title">
- <h1><%= title %></h1>
- </div>
- <div class="body">
- <p>
- The production <%= title %> page is a page generated at <a href="/internal/pages">/internal/pages</a>! This one is just a placeholder.
- </p>
+<div class="crayons-layout crayons-layout--limited">
+ <div class="crayons-card text-styles text-padding">
+ <h1 class="fs-3xl s:fs-4xl l:fs-5xl fw-bold s:fw-heavy lh-tight mb-4 mt-0"><%= title %></h1>
+
+ <p>The production <%= title %> page is a page generated at <a href="/internal/pages">/internal/pages</a>! This one is just a placeholder.</p>
</div>
</div>
| [No CFG could be retrieved] | E - mails for the . | I think I'll turn page title into its own component in next PR - I've been using combination of these classes often enough to make it a component I think. |
@@ -55,11 +55,12 @@ class RawBrainVision(_BaseRaw):
triggers will be ignored. Default is 0 for backwards compatibility, but
typically another value or None will be necessary.
event_id : dict | None
- The id of the event to consider. If None (default),
- only stimulus events are added to the stimulus channel. If dict,
- the keys will be mapped to trigger values on the stimulus channel
- in addition to the stimulus events. Keys are case-sensitive.
- Example: {'SyncStatus': 1; 'Pulse Artifact': 3}.
+ The id of special events to consider - that is, events that don't
+ follow the normal Brainvision trigger format ('SXX' etc).
+ If dict, the keys will be mapped to trigger values on the stimulus
+ channel. Example: {'SyncStatus': 1; 'Pulse Artifact': 3}. If None
+ or an empty dict (default), only stimulus events are added to the
+ stimulus channel. Keys are case sensitive.
verbose : bool, str, int, or None
If not None, override default verbose level (see mne.verbose).
| [read_raw_brainvision->[RawBrainVision],_get_vhdr_info->[_check_hdr_version]] | A class constructor for the object. Reads the VHDR file and returns the file object. | SXX --> SXXX if they're always three digits |
@@ -585,9 +585,6 @@ func TestCloudFrontStructure_flattenOrigin(t *testing.T) {
if out["origin_path"] != "/" {
t.Fatalf("Expected out[origin_path] to be /, got %v", out["origin_path"])
}
- if !out["custom_origin_config"].(*schema.Set).Equal(in["custom_origin_config"].(*schema.Set)) {
- t.Fatalf("Expected out[custom_origin_config] to be %v, got %v", in["custom_origin_config"], out["custom_origin_config"])
- }
}
func TestCloudFrontStructure_expandCustomHeaders(t *testing.T) {
| [Difference,Equal,NewSet,DeepEqual,StringSlice,List,Fatalf] | TestCloudFrontStructure_expandOrigin expands the given origin object into a TestCloudFront TestCloudFrontStructure_flattenCustomHeaders - flatten a list of custom headers and flatten them. | `custom_origin_config` itself contains the `origin_ssl_protocols` set, which are not stable to test in this manner. |
@@ -278,6 +278,15 @@ static void configure_handshake_ctx(SSL_CTX *server_ctx, SSL_CTX *server2_ctx,
unsigned char *ticket_keys;
size_t ticket_key_len;
+ TEST_check(SSL_CTX_set_max_send_fragment(server_ctx,
+ test->max_fragment_size));
+ if (server2_ctx != NULL) {
+ TEST_check(SSL_CTX_set_max_send_fragment(server2_ctx,
+ test->max_fragment_size));
+ }
+ TEST_check(SSL_CTX_set_max_send_fragment(client_ctx,
+ test->max_fragment_size));
+
switch (extra->client.verify_callback) {
case SSL_TEST_VERIFY_ACCEPT_ALL:
SSL_CTX_set_cert_verify_callback(client_ctx, &verify_accept_cb,
| [do_handshake->[HANDSHAKE_RESULT_free],HANDSHAKE_RESULT->[HANDSHAKE_RESULT_new]] | Configure the handshake context. Set the TGT key and the next protocol negotiation callback. This function is called when SSL_CTX_set_alpn_protocols is called if validation is none return null ;. | need explicit ==/!= here. |
@@ -27,13 +27,13 @@ type gitlabCI struct {
// See https://docs.gitlab.com/ee/ci/variables/.
func (gl gitlabCI) DetectVars() Vars {
v := Vars{Name: gl.Name}
- v.BuildID = os.Getenv("CI_JOB_ID")
+ v.BuildID = os.Getenv("CI_PIPELINE_IID")
v.BuildType = os.Getenv("CI_PIPELINE_SOURCE")
v.BuildURL = os.Getenv("CI_JOB_URL")
v.SHA = os.Getenv("CI_COMMIT_SHA")
v.BranchName = os.Getenv("CI_COMMIT_REF_NAME")
v.CommitMessage = os.Getenv("CI_COMMIT_MESSAGE")
- v.PRNumber = os.Getenv("CI_MERGE_REQUEST_ID")
+ v.PRNumber = os.Getenv("CI_MERGE_REQUEST_IID")
return v
}
| [DetectVars->[Getenv]] | DetectVars returns a Vars object with all environment variables. | Although the previous env var had a valid value, it was a unique ID within GitLab's environment and so it looks odd when rendering it in the UI. The `CI_PIPELINE_IID` is an instance ID within the context of the current GitLab project and is expected to be a more sane number and not something like `406044007`. |
@@ -517,10 +517,10 @@ public class SnapshotManagerImpl extends MutualExclusiveIdsManagerBase implement
List<Long> ids = getIdsListFromCmd(cmd.getId(), cmd.getIds());
Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject = new Ternary<Long, Boolean, ListProjectResourcesCriteria>(cmd.getDomainId(), cmd.isRecursive(), null);
- _accountMgr.buildACLSearchParameters(caller, id, cmd.getAccountName(), cmd.getProjectId(), permittedAccounts, domainIdRecursiveListProject, cmd.listAll(), false);
- Long domainId = domainIdRecursiveListProject.first();
- Boolean isRecursive = domainIdRecursiveListProject.second();
- ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third();
+ _accountMgr.buildACLSearchParameters(caller, id, cmd.getAccountName(), cmd.getProjectId(), permittedAccounts, domainIdRecursiveListProject, cmd.listAll(), false);
+ Long domainId = domainIdRecursiveListProject.first();
+ Boolean isRecursive = domainIdRecursiveListProject.second();
+ ListProjectResourcesCriteria listProjectResourcesCriteria = domainIdRecursiveListProject.third();
Filter searchFilter = new Filter(SnapshotVO.class, "created", false, cmd.getStartIndex(), cmd.getPageSizeVal());
SearchBuilder<SnapshotVO> sb = _snapshotDao.createSearchBuilder();
| [SnapshotManagerImpl->[takeSnapshot->[takeSnapshot,postCreateSnapshot],backupSnapshot->[backupSnapshot],findRecurringSnapshotSchedule->[listPoliciesforVolume],deleteSnapshotPolicies->[deletePolicy,getSnapshotUserId],deleteSnapshotDirsForAccount->[deleteSnapshot],allocSnapshot->[supportedByHypervisor,getSnapshotType],sendToPool->[sendToPool],revertSnapshot->[revertSnapshot],deletePoliciesForVolume->[deletePolicy,listPoliciesforVolume],deleteSnapshot->[deleteSnapshot],supportedByHypervisor->[hostSupportSnapsthotForVolume],postCreateSnapshot->[getSnapshotUserId],getSnapshotType->[getSnapshotType]]] | List snapshots based on the specified parameters. This method is used to find all resource tags that are not part of the snapshot. This method checks if there is a snapshot in the system and if so returns it. | @nvazquez What is changed here? |
@@ -144,6 +144,9 @@ type TSDBConfig struct {
// How often to check for idle TSDBs for closing. DefaultCloseIdleTSDBInterval is not suitable for testing, so tests can override.
CloseIdleTSDBInterval time.Duration `yaml:"-"`
+
+ // Positive value enables experiemental support for exemplars. 0 or less to disable.
+ MaxExemplars int `yaml:"-"`
}
// RegisterFlags registers the TSDBConfig flags.
| [String->[String],RegisterFlags->[RegisterFlags],Validate->[Validate]] | RegisterFlags registers flags for the TSDB config TSDB configuration options. | Expose it in YAML config too, please. Then run `make doc`. |
@@ -317,6 +317,11 @@ type Digest struct {
UsersFile string
}
+// Forward authentication
+type Forward struct {
+ Address string
+}
+
// CanonicalDomain returns a lower case domain with trim space
func CanonicalDomain(domain string) string {
return strings.ToLower(strings.TrimSpace(domain))
| [String->[Sprintf],Set->[FieldsFunc,Split,New,ParseFloat],MatchConstraintWithAtLeastOneTag->[Glob],MarshalText->[String],EqualFold,Contains,New,ToLower,TrimSpace,Errorf,SplitN] | type is a type that can be used to create a constraint that can be used to adds strings elem into the parser. | I think it would be great to add the possibility to connect in TLS (with an self-signed cert). |
@@ -136,5 +136,6 @@ function _video(state = VIDEO_INITIAL_MEDIA_STATE, action) {
*/
ReducerRegistry.register('features/base/media', combineReducers({
audio: _audio,
+ sounds: _sounds,
video: _video
}));
| [No CFG could be retrieved] | Register the Reducers for the audio and video. | as I said, I think this belongs in features/base/sounds, base/media is mostly concerned about what media types we use in a call. |
@@ -1025,9 +1025,13 @@ namespace ExtUI {
TERN_(HAS_FAN, thermalManager.zero_fan_speeds());
}
- bool awaitingUserConfirm() { return TERN0(HAS_RESUME_CONTINUE, wait_for_user); }
+ bool awaitingUserConfirm() { return (TERN0(HAS_RESUME_CONTINUE, wait_for_user) || getMachineBusyState() >= 3); }
void setUserConfirmed() { TERN_(HAS_RESUME_CONTINUE, wait_for_user = false); }
+ #if M600_PURGE_MORE_RESUMABLE
+ void setPauseMenuResponse(PauseMenuResponse response) { pause_menu_response = response; }
+ #endif
+
void printFile(const char *filename) {
TERN(SDSUPPORT, card.openAndPrintFile(filename), UNUSED(filename));
}
| [No CFG could be retrieved] | Set the target temperature in celsius. private static final int print_job_timer_interval = 1000 ;. | If the busy-state enum values ever change, you can be sure we'll remember to change this "3" because I just wrote it down on a post-it note. |
@@ -1,5 +1,5 @@
# -*- encoding : utf-8 -*-
-class RemovePublicBodyIdFromOutgoingMessages < !rails5? ? ActiveRecord::Migration : ActiveRecord::Migration[4.2] # 1.2
+class RemovePublicBodyIdFromOutgoingMessages < ActiveRecord::Migration[4.2] # 1.2
def self.up
remove_column :outgoing_messages, :public_body_id
end
| [RemovePublicBodyIdFromOutgoingMessages->[up->[remove_column],down->[add_column],rails5?]] | Adds the missing integer column if it does not exist. | Line is too long. [82/80] |
@@ -219,13 +219,13 @@ func (s *Server) cleanupA1UserRolesData() error {
}
func (s *Server) ensureAdminsTeamExists(ctx context.Context) (string, error) {
- resp, err := s.teamsClient.GetTeamByName(ctx, &teams.GetTeamByNameReq{Name: "admins"})
+ resp, err := s.teamsClient.GetTeam(ctx, &teams.GetTeamReq{Id: "admins"})
if err != nil {
if status.Convert(err).Code() != codes.NotFound {
return "", errors.Wrap(err, "failed to fetch team \"admins\"")
}
// create "admins" team
- resp, err := s.teamsClient.CreateTeam(ctx, &teams.CreateTeamReq{Name: "admins", Description: "TODO"})
+ resp, err := s.teamsClient.CreateTeam(ctx, &teams.CreateTeamReq{Id: "admins", Name: "TODO"})
if err != nil {
return "", errors.Wrap(err, "failed to create team \"admins\"")
}
| [cleanupA1UserRolesData->[Wrapf,Rename],DisplayName->[Sprintf],MigrateA1UserRoles->[IsNotExist,Wrapf,Info,GetUser,Sprintf,cleanupA1UserRolesData,Decode,SliceContains,Open,ensureAdminsTeamExists,Wrap,NewDecoder,AddUsers],ensureAdminsTeamExists->[CreateTeam,GetTeamByName,Convert,Wrap,Code],MigrateA1Users->[IsNotExist,CreateUserWithHashedPass,Wrapf,Info,Error,Sprintf,Decode,New,DisplayName,NewV4,String,Open,NewDecoder,Rename],NewFactory,Wrapf,SetLoggerV2,NewService,New,DialContext,NewValidator,Errorf,Open,WrapZapGRPC,NewTeamsV1Client,Wrap,NewSubjectPurgeClient] | ensureAdminsTeamExists checks if a admins team exists in the server. If it. | shouldn't the name be `admins` too? |
@@ -46,7 +46,7 @@ public class InputStreamConsumer extends Thread {
LOG.info(line);
}
} catch (IOException ioe) {
- LOG.severe(ioe.toString());
+ LOG.error("Run the InputStreamConsumer is failed:", ioe);
ioe.printStackTrace();
}
}
| [InputStreamConsumer->[captureOutput->[InputStreamConsumer]]] | Runs the process. | Could this line be removed? |
@@ -844,7 +844,7 @@ func (ss *StateSync) generateNewState(bc *core.BlockChain, worker *worker.Worker
if block == nil {
break
}
- err = ss.UpdateBlockAndStatus(block, bc, worker, false)
+ err = ss.UpdateBlockAndStatus(block, bc, worker, true)
if err != nil {
break
}
| [GetBlocks->[GetBlocks],getMaxConsensusBlockFromParentHash->[ForEachPeer],CreateSyncConfig->[CloseConnections,AddPeer],AddPeer->[IsEqual],SyncLoop->[purgeAllBlocksFromCache,getMaxPeerHeight,purgeOldBlocksFromCache,RegisterNodeInfo,ProcessStateSync],AddNewBlock->[FindPeerByHash],ProcessStateSync->[generateStateSyncTaskQueue,getConsensusHashes,generateNewState,downloadBlocks],generateNewState->[getBlockFromLastMileBlocksByParentHash,getMaxConsensusBlockFromParentHash,ForEachPeer,UpdateBlockAndStatus,getBlockFromOldBlocksByParentHash],GetBlockHashesConsensusAndCleanUp->[getHowManyMaxConsensus,cleanUpPeers],downloadBlocks->[indexes,blockHashes,GetBlocks,ForEachPeer,RemovePeer,blockHashesStr],IsSameBlockchainHeight->[getMaxPeerHeight],purgeAllBlocksFromCache->[ForEachPeer],getMaxPeerHeight->[ForEachPeer,RemovePeer],generateStateSyncTaskQueue->[ForEachPeer],IsOutOfSync->[getMaxPeerHeight],purgeOldBlocksFromCache->[ForEachPeer],getConsensusHashes->[GetBlockHashesConsensusAndCleanUp,ForEachPeer,RemovePeer],RegisterNodeInfo->[ForEachPeer,registerToBroadcast],GetMaxPeerHeight->[getMaxPeerHeight]] | generateNewState generates new state for the given chain and worker. | why change to true for verifyAllSig? |
@@ -431,6 +431,8 @@ namespace System
{
client = Registry.GetValue(clientKey, "Enabled", null);
server = Registry.GetValue(serverKey, "Enabled", null);
+ Console.WriteLine($"client {client}");
+ Console.WriteLine($"client {client}");
if (client is int c && server is int s)
{
return c == 1 && s == 1;
| [PlatformDetection->[GetTls11Support->[OpenSslGetTlsSupport],GetTls13Support->[AndroidGetSslProtocolSupport],GetTls10Support->[OpenSslGetTlsSupport],GetStaticNonPublicBooleanPropertyValue]] | Checks if the TLS 1. 3 protocol is supported on this system. | Should be server? |
@@ -324,8 +324,16 @@ public abstract class Node extends AbstractModelObject implements Reconfigurable
if(l!=null && !l.contains(this))
return CauseOfBlockage.fromMessage(Messages._Node_LabelMissing(getNodeName(),l)); // the task needs to be executed on label that this node doesn't have.
- if(l==null && getMode()== Mode.EXCLUSIVE)
- return CauseOfBlockage.fromMessage(Messages._Node_BecauseNodeIsReserved(getNodeName())); // this node is reserved for tasks that are tied to it
+ if(l==null && getMode()== Mode.EXCLUSIVE) {
+ // flyweight tasks need to get executed somewhere, if every node
+ if (!(item.task instanceof Queue.FlyweightTask && (
+ this instanceof Jenkins
+ || Jenkins.getInstance().getNumExecutors() < 1
+ || Jenkins.getInstance().getMode() == Mode.EXCLUSIVE)
+ )) {
+ return CauseOfBlockage.fromMessage(Messages._Node_BecauseNodeIsReserved(getNodeName())); // this node is reserved for tasks that are tied to it
+ }
+ }
Authentication identity = item.authenticate();
if (!getACL().hasPermission(identity,Computer.BUILD)) {
| [Node->[getACL->[getACL],getChannel->[getChannel,toComputer],getSelfLabel->[getNodeName],hasPermission->[hasPermission],createPath->[getChannel],checkPermission->[checkPermission],canTake->[canTake,getMode,getNodeName]]] | Checks if this node can take the given task. | Seems like too much boolean logic to not have any tests. |
@@ -79,6 +79,11 @@ func (m *Manager) BuildHandlers(rootCtx context.Context, entryPoints []string) m
return entryPointHandlers
}
+type nameAndConfig struct {
+ routerName string // just so we have it as additional information when logging
+ TLSConfig *tls.Config
+}
+
func (m *Manager) buildEntryPointHandler(ctx context.Context, configs map[string]*runtime.TCPRouterInfo, configsHTTP map[string]*runtime.RouterInfo, handlerHTTP http.Handler, handlerHTTPS http.Handler) (*tcp.Router, error) {
router := &tcp.Router{}
router.HTTPHandler(handlerHTTP)
| [getTCPRouters->[GetTCPRoutersByEntryPoints],getHTTPRouters->[GetRoutersByEntryPoints],buildEntryPointHandler->[AddRoute,ParseDomains,Warn,AddError,AddCatchAllNoTLS,Warnf,AddProviderInContext,With,Error,ParseHostSNI,Errorf,GetQualifiedName,BuildTCP,HTTPHandler,Debugf,Debug,HTTPSHandler,AddRouteTLS,Get,Str,FromContext,AddRouteHTTPTLS],BuildHandlers->[Str,FromContext,Error,getTCPRouters,With,getHTTPRouters,buildEntryPointHandler]] | BuildHandlers creates a list of handlers for the given entryPoints TLSConfigName is the name of the TLS configuration that should be applied to the hostSN Add a route to the router based on the configuration. | so the rest of the code below just uses an empty defaultTLSConf? |
@@ -508,8 +508,7 @@ func printPropertyValueDiff(
if shouldPrintOld && shouldPrintNew {
if diff.Old.IsArchive() &&
- diff.New.IsArchive() &&
- !causedReplace {
+ diff.New.IsArchive() {
printArchiveDiff(
b, titleFunc, diff.Old.ArchiveValue(), diff.New.ArchiveValue(),
| [IsObject,IsAssets,ArrayValue,NewArchiveProperty,DiffLinesToChars,Assertf,BoolValue,DiffMain,IsBool,PropertyKey,IsNull,Diff,IsOutput,Strings,Color,Itoa,NewAssetProperty,IgnoreError,New,GetAssets,ParseReference,StableKeys,Assert,Len,NumberValue,Prefix,IsComputed,TypeString,TrimSpace,ID,Failf,TypeOf,IsAsset,IsText,AssetValue,GetPath,DiffCharsToLines,Split,IsString,StringValue,IsURI,IsArchive,GetURI,Sprintf,ObjectValue,IsNumber,ArchiveValue,IsArray,RawPrefix,Keys,String,WriteString,URN,MassageIfUserProgramCodeAsset] | elemTitleFunc prints the i - th element of a branch. Check if the object has a primitive value. | note: this !causedReplace line was something that was here since some of the earliest days of pulumi. I never touched it because i wasn't certain what impact it might have. Now that i've run into this issue first hand, i def think this is something we do *not* want, and we should essentially always print two archives as a diff, regardless of what the cause was. |
@@ -0,0 +1,13 @@
+package org.infinispan.server.test.category;
+
+import org.junit.experimental.categories.Category;
+
+/**
+ * {@link Category} tag for local hot rod tests in domain mode.
+ *
+ * @author mgencur
+ *
+ */
+public class HotRodLocalDomain extends ClientLocal {
+
+}
| [No CFG could be retrieved] | No Summary Found. | Could you tell me what does it mean a local domain? |
@@ -265,7 +265,8 @@ class TorchRankerAgent(TorchAgent):
@staticmethod
def _find_match(cand_vecs, label_vec):
- return ((cand_vecs == label_vec).sum(1) == cand_vecs.size(1)).nonzero()[0]
+ matches = ((cand_vecs == label_vec).sum(1) == cand_vecs.size(1)).nonzero()
+ return matches[0] if (len(matches.tolist()) > 0) else None
def share(self):
"""Share model parameters."""
| [TorchRankerAgent->[add_cmdline_args->[add_cmdline_args],eval_step->[score_candidates],train_step->[score_candidates]]] | Find match. | do you really need the call to tolist()? len should work on the tensor directly and be faster, no? |
@@ -7,7 +7,7 @@ using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
-using Utf8MemoryStream = System.Text.Json.Tests.Serialization.CollectionTests.Utf8MemoryStream;
+using Utf8MemoryStream = System.Text.Json.Serialization.Tests.CollectionTests.Utf8MemoryStream;
namespace System.Text.Json.Serialization.Tests
{
| [StreamTests_DeserializeAsyncEnumerable->[DegenerateQueueConverterFactory->[DegenerateQueueConverter->[Read->[Read]]]]] | This class is used to read an array of SimpleTestClass objects from a persistent memory stream Reads an async enumeration from a stream. | FWIW the `System.Text.Json.Tests.Serialization` namespace has crept up in quite a few other places as well. |
@@ -123,4 +123,13 @@ public class OperationReturnTypeModelValidator implements ExtensionModelValidato
throw new IllegalOperationModelDefinitionException(format("Operation '%s' in Extension '%s' is missing a return type",
operationModel.getName(), model.getName()));
}
+
+ private void validateGenerics(ExtensionModel extensionModel, NamedObject namedObject, OperationModel operationModel,
+ List<TypeGeneric> generics, Class returnType, ProblemsReporter problemsReporter) {
+ if (generics.get(0).getConcreteType().isSameType(Void.class) &&
+ !generics.get(1).getConcreteType().isSameType(Void.class)) {
+ problemsReporter.addWarning(new Problem(namedObject, format(INVALID_GENERICS_ERROR_MESSAGE, operationModel.getName(),
+ extensionModel.getName(), returnType.getName())));
+ }
+ }
}
| [OperationReturnTypeModelValidator->[validateForbiddenTypesReturnType->[addError,getName,Problem,format,ifPresent],validateResultReturnType->[addError,getName,Problem,isAssignableTo,format,isEmpty],missingReturnTypeException->[format,IllegalOperationModelDefinitionException,getName],validate->[walk],validateNonBlockingCallback->[addError,getName,Problem,format,isEmpty,ifPresent],validateMessageCollectionsReturnType->[addError,getGenerics,getName,getConcreteType,Problem,isAssignableTo,format,isEmpty],of]] | Throw an IllegalOperationModelDefinitionException if the given operation has a missing return type. | why is the `namedObject` parameter needed? Seems redundant. This method should operate over the operationModel entirely |
@@ -17,9 +17,9 @@ import org.fao.geonet.domain.UserGroupId;
import org.fao.geonet.exceptions.ServiceNotAllowedEx;
import org.fao.geonet.kernel.SvnManager;
import org.fao.geonet.kernel.datamanager.IMetadataOperations;
+import org.fao.geonet.kernel.datamanager.IMetadataUtils;
import org.fao.geonet.kernel.setting.SettingManager;
import org.fao.geonet.kernel.setting.Settings;
-import org.fao.geonet.repository.MetadataRepository;
import org.fao.geonet.repository.OperationAllowedRepository;
import org.fao.geonet.repository.UserGroupRepository;
import org.fao.geonet.repository.UserRepository;
| [BaseMetadataOperations->[setOperation->[setOperation],unsetOperation->[checkOperationPermission,unsetOperation],copyDefaultPrivForGroup->[setOperation]]] | package for import in the FAO package Creates a base metadata operations class that implements the IMetadataOperations interface. | Add file header |
@@ -80,13 +80,15 @@ public class SymbolicEvaluator {
private int currentExecutionStateCount;
- public Map<Tree, SymbolicBooleanConstraint> evaluateMethod(ExecutionState state, MethodTree tree) {
+ public Map<Tree, SymbolicBooleanConstraint> evaluateMethod(JavaFileScannerContext context, ExecutionState state, MethodTree tree) {
result.clear();
if (tree.block() != null) {
try {
evaluateStatement(ImmutableList.of(state), tree.block());
} catch (SymbolicExecutionException e) {
- LOGGER.info("analysis of " + tree + " failed: " + e.getMessage(), e);
+ JavaTree javaTree = (JavaTree) tree;
+ LOGGER.info("in " + context.getFile() + ": analysis of " + tree.simpleName().name() + " at line " + javaTree.getLine() + " failed: " + e.getMessage());
+ LOGGER.debug(e);
result.clear();
}
}
| [SymbolicEvaluator->[PackedStates->[splitUnknowns->[add,instantiateExecutionState],getBooleanConstraint->[isAlwaysFalse,isAlwaysTrue]],ExpressionVisitor->[visitIdentifier->[extractVariableSymbol],evaluateRelationalOperator->[retrieveSymbolicValue],visitMemberSelectExpression->[visitMemberSelectExpression,isSuperOrThisMemberSelect],visitMethodInvocation->[visitMethodInvocation],visitUnaryExpression->[extractVariableSymbol,visitUnaryExpression],visitAssignmentExpression->[extractVariableSymbol,visitAssignmentExpression],evaluateConditionalAnd->[evaluateCondition],evaluateConditionalOr->[evaluateCondition],visitArrayAccessExpression->[visitArrayAccessExpression],visitConditionalExpression->[evaluateCondition]],StatementVisitor->[visitBlock->[evaluateStatement],visitVariable->[evaluateExpression],visitReturnStatement->[evaluateExpression],visitThrowStatement->[evaluateExpression],visitIfStatement->[evaluateStatement,evaluateCondition],visitWhileStatement->[evaluateStatement,evaluateCondition],visitForStatement->[evaluateStatement,evaluateCondition],visitForEachStatement->[evaluateStatement,evaluateExpression],visitTryStatement->[evaluateStatement,instantiateExecutionState],visitDoWhileStatement->[evaluateStatement,evaluateExpression],visitSwitchStatement->[evaluateExpression,instantiateExecutionState],processCase->[evaluateStatement],visitSynchronizedStatement->[evaluateStatement,evaluateExpression],visitExpressionStatement->[evaluateExpression]],ConditionVisitor->[visitIdentifier->[extractVariableSymbol],evaluateRelationalOperator->[retrieveSymbolicValue],visitMemberSelectExpression->[isSuperOrThisMemberSelect],visitUnaryExpression->[evaluateCondition],evaluateConditionalAnd->[evaluateCondition],visitAssignmentExpression->[extractVariableSymbol,evaluateExpression],evaluateConditionalOr->[evaluateCondition],visitArrayAccessExpression->[evaluateExpression],visitConditionalExpression->[evaluateCondition]],PackedStatementStates->[instantiateWithBreakStates->[PackedStatem
entStates],instantiateWithStates->[PackedStatementStates],instantiate->[PackedStatementStates],addState->[add],isEmpty->[isEmpty],iterator->[iterator]]]] | Evaluate a method tree. | I would separate the message that should be logged as info from the exception that should be logged as debug as we don't want to pollute the log with a stacktrace at all times. |
@@ -236,7 +236,7 @@ public class ClassUtils extends org.apache.commons.lang.ClassUtils
return (T) primitiveTypeNameMap.get(className);
}
}
-
+
Class<?> clazz = AccessController.doPrivileged(new PrivilegedAction<Class<?>>()
{
public Class<?> run()
| [ClassUtils->[compare->[compare],isClassOnPath->[loadClass],getSatisfiableMethods->[getSatisfiableMethods,getParameterTypes],getResources->[run->[getResources]],loadClass->[run->[loadClass],loadClass],instanciateClass->[instanciateClass,loadClass],getMethod->[getMethod,getParameterTypes],getConstructor->[getParameterTypes,getConstructor],getParameterTypes->[getParameterTypes],classNameHelper->[classNameHelper],getResource->[run->[getResource]]]] | loadClass - load a class by name. | This class has only formatting changes. Remove it form the PR |
@@ -63,6 +63,9 @@ class Checkout(ModelWithMetadata):
email = models.EmailField()
token = models.UUIDField(primary_key=True, default=uuid4, editable=False)
quantity = models.PositiveIntegerField(default=0)
+ channel = models.ForeignKey(
+ Channel, null=True, related_name="checkouts", on_delete=models.SET_NULL,
+ )
billing_address = models.ForeignKey(
Address, related_name="+", editable=False, null=True, on_delete=models.SET_NULL
)
| [Checkout->[get_country->[set_country],is_shipping_required->[is_shipping_required]],CheckoutLine->[is_shipping_required->[is_shipping_required]]] | Create a queryset of models. Field instances for the n + 1 related models. MissingFields in order to get a list of neccesary fields. | Not sure that we should use `on_delete=models.SET_NULL`. In delete channel DoD we have information that all checkouts related to the channel should be deleted. We shouldn't allow to checkout without channel. |
@@ -67,6 +67,8 @@ namespace Microsoft.CSharp.RuntimeBinder.ComInterop
return base.BindSetIndex(binder, indexes, value);
}
+ [UnconditionalSuppressMessage("ReflectionAnalysis", "IL2026:RequiresUnreferencedCode",
+ Justification = "This whole class is unsafe. Constructors are marked as such.")]
private DynamicMetaObject BindComInvoke(ComMethodDesc method, DynamicMetaObject[] indexes, CallInfo callInfo, bool[] isByRef)
{
Expression callable = Expression;
| [DispCallableMetaObject->[BindingRestrictions->[DispatchComObject,Constant,DispId,Equal,nameof,ComTypeDesc,GetExpressionRestriction,GetProperty,GetTypeRestriction,Convert,Merge,IDispatchRestriction,Property],DynamicMetaObject->[DispatchComObject,Value,BindComInvoke,Restrictions,BindGetOrInvoke,LimitType,TryGetMemberMethod,BindGetIndex,TryGetPropertySetterExplicit,Expression,Invoke,TryGetPropertySetter,ProcessArgumentsForCom,BindSetIndex,HasValue,Convert,MemberName,CallInfo,AddLast,BindInvoke,Block,TryGetMemberMethodExplicit],Empty]] | Override this method to handle the set - index invocation. | Since this method is private and only called from other methods that are suppressing IL2026, this feels like you should mark this method as `RequiresUnreferencedCode`. |
@@ -1574,11 +1574,8 @@ static long dgram_sctp_ctrl(BIO *b, int cmd, long num, void *ptr)
}
int BIO_dgram_sctp_notification_cb(BIO *b,
- void (*handle_notifications) (BIO *bio,
- void
- *context,
- void *buf),
- void *context)
+ BIO_dgram_sctp_notification_handler_fn handle_notifications,
+ void *context)
{
bio_dgram_sctp_data *data = (bio_dgram_sctp_data *) b->ptr;
| [No CFG could be retrieved] | Returns the size of the unspecific bio in the system. SCTP_DUMP - During SCTP SENDER_DRY event. | I seem to recall that there are problems with passing a function pointer via a "void *" in some compilers. @openssl - can anyone remind me of the issue? |
@@ -6,8 +6,10 @@ from json import JSONEncoder
from babel.numbers import get_territory_currencies
from django import forms
from django.conf import settings
-from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
+from django.core.paginator import Paginator, InvalidPage
+from django.http import Http404
from django.utils.encoding import iri_to_uri, smart_text
+from django.utils.translation import pgettext_lazy
from django_countries import countries
from django_countries.fields import Country
from django_prices_openexchangerates import exchange_currency
| [to_local_currency->[exchange_currency,isinstance],get_client_ip->[split,get],get_currency_for_country->[get_territory_currencies,len],get_user_shipping_country->[is_authenticated],build_absolute_uri->[iri_to_uri,get_current,urljoin],get_paginator_items->[Paginator,page],get_country_by_ip->[Country,get],CategoryChoiceField->[label_from_instance->[getattr,max,smart_text]],serialize_decimal->[str,isinstance,default],create_superuser->[save,set_password,get_or_create],reader] | Return a list of categories with a single value for the category. Get the country for a given IP address. | Please use `isort` |
@@ -43,6 +43,9 @@ func (f *GitlabDownloaderFactory) Match(opts base.MigrateOptions) (bool, error)
if strings.EqualFold(u.Host, "gitlab.com") && opts.AuthUsername != "" {
matched = true
}
+ if strings.EqualFold(u.Fragment, "gitlab") && opts.AuthUsername != "" {
+ matched = true
+ }
return matched, nil
}
| [GetReleases->[convertGitlabRelease],GetTopics->[New],GetMilestones->[New],GetRepoInfo->[New],GetLabels->[New]] | Match checks if the given options match the gitlab downloader. | For self-hosted gitlab, we need a new option on the UI to let user select. |
@@ -227,6 +227,14 @@ public class AetherClassPathClassifier implements ClassPathClassifier {
ZIP_EXTENSION));
}).collect(toList());
+ if (MODULE.equals(rootArtifactType)) {
+ File rootArtifactOutputFile = resolveRootArtifactFile(context.getRootArtifact());
+ if (rootArtifactOutputFile == null) {
+ throw new IllegalStateException("rootArtifact identified as MODULE but doesn't have an output");
+ }
+ containerUrls.add(toUrl(rootArtifactOutputFile));
+ }
+
resolveSnapshotVersionsToTimestampedFromClassPath(containerUrls, context.getClassPathURLs());
return containerUrls;
| [AetherClassPathClassifier->[discoverDependency->[findDirectDependency]]] | Builds a list of container urls based on the provided dependencies. Finds all the container urls that are not POM files or ZIP files. | an output > an output file? |
@@ -11,8 +11,7 @@ class Uploader(object):
def post(self, url, content):
self.output.info("")
- it = upload_in_chunks(content, self.chunk_size, self.output)
- return self.requester.put(url, data=IterableToFileAdapter(it), verify=self.verify)
+ return self.requester.put(url, data=content, verify=self.verify)
class Downloader(object):
| [print_progress->[rewrite_line],progress_units->[int],Uploader->[post->[info,upload_in_chunks,put,IterableToFileAdapter]],chunker->[len,xrange],IterableToFileAdapter->[read->[next],__init__->[iter,len],__iter__->[__iter__]],Downloader->[download->[print_progress,progress_units,int,ConanException,append,get,len,iter_content,join]],upload_in_chunks->[__init__->[chunker,len],__iter__->[enumerate,progress_units,print_progress]]] | Post a file to the specified URL. | TODO: check if not having upload progress bar is an issue |
@@ -258,6 +258,14 @@ public class KsqlConfig extends AbstractConfig {
= "The maximum amount of pull query bandwidth in megabytes allowed over"
+ " a period of one hour. Once the limit is hit, queries will fail immediately";
+ public static final String KSQL_QUERY_PUSH_SCALABLE_MAX_HOURLY_BANDWIDTH_MEGABYTES_CONFIG
+ = "ksql.query.push.scalable.max.hourly.bandwidth.megabytes";
+ public static final Integer KSQL_QUERY_PUSH_SCALABLE_MAX_HOURLY_BANDWIDTH_MEGABYTES_DEFAULT
+ = Integer.MAX_VALUE;
+ public static final String KSQL_QUERY_PUSH_SCALABLE_MAX_HOURLY_BANDWIDTH_MEGABYTES_DOC
+ = "The maximum amount of scalable push query bandwidth in megabytes allowed over"
+ + " a period of one hour. Once the limit is hit, queries will fail immediately";
+
public static final String KSQL_QUERY_PULL_THREAD_POOL_SIZE_CONFIG
= "ksql.query.pull.thread.pool.size";
public static final Integer KSQL_QUERY_PULL_THREAD_POOL_SIZE_DEFAULT = 100;
| [KsqlConfig->[ConfigValue->[isResolved->[isResolved]],buildConfigDef->[defineLegacy,defineCurrent],getKsqlStreamConfigProps->[getKsqlStreamConfigProps],buildStreamingConfig->[applyStreamsConfig],streamTopicConfigNames->[getName],getAllConfigPropsWithSecretsObfuscated->[getKsqlConfigPropsWithSecretsObfuscated],overrideBreakingConfigsWithOriginalValues->[KsqlConfig],empty->[KsqlConfig],cloneWithPropertyOverwrite->[KsqlConfig,getKsqlStreamConfigProps,buildStreamingConfig],getKsqlStreamConfigPropsWithSecretsObfuscated->[convertToObfuscatedString],resolveStreamsConfig->[ConfigValue,empty],CompatibilityBreakingConfigDef->[defineCurrent->[define],defineLegacy->[define],define->[define]],configDef,buildStreamingConfig]] | The maximum number of qps allowed for pull queries on this host. Config that enable pull queries on streams and scan over the data. | We now changed the names of the spq config prefix. It would be good to change it to v2. |
@@ -223,12 +223,14 @@ class SpecsMatcher:
the hash of the class instance in its key, and results in a very large key when used with `Specs`
directly).
"""
- tags: Tuple
- exclude_patterns: Tuple
+ tags: Tuple[str, ...]
+ exclude_patterns: Tuple[str, ...]
- def __init__(self, tags: Optional[Tuple] = None, exclude_patterns: Tuple = ()) -> None:
+ def __init__(
+ self, tags: Optional[Iterable[str]] = None, exclude_patterns: Optional[Iterable[str]] = None,
+ ) -> None:
self.tags = tuple(tags or [])
- self.exclude_patterns = tuple(exclude_patterns)
+ self.exclude_patterns = tuple(exclude_patterns or [])
@memoized_property
def _exclude_compiled_regexps(self):
| [DescendantAddresses->[address_target_pairs_from_address_families->[AddressResolutionError,all_address_target_pairs]],Specs->[__init__->[SpecsMatcher]],SpecsMatcher->[matches_target_address_pair->[_target_tag_matches,_excluded_by_pattern]],SingleAddress->[matching_address_families->[address_families_for_dir],make_glob_patterns->[globs_in_single_dir],address_target_pairs_from_address_families->[_SingleAddressResolutionError]],AscendantAddresses->[address_target_pairs_from_address_families->[all_address_target_pairs]],Spec->[address_families_for_dir->[AddressFamilyResolutionError]],SiblingAddresses->[matching_address_families->[address_families_for_dir],make_glob_patterns->[globs_in_single_dir],address_target_pairs_from_address_families->[all_address_target_pairs]]] | Initialize the object with the given tags and exclude patterns. | I changed `exclude_patterns` to be `Optional` so that it mirrors `tags` and avoids having to add `or ()` to some downstream code. It didn't seem like there was a strong reason why this wasn't done in the first place. |
@@ -23,6 +23,7 @@
#include <ipc/topology.h>
#include <config.h>
#include <errno.h>
+#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
| [ipc_platform_send_msg->[imx_mu_xcr_rmw,spin_unlock_irq,mailbox_dspbox_write,list_item_del,imx_mu_read,ipc_get,spin_lock_irq,list_item_append,platform_shared_commit,tracev_ipc,IMX_MU_xCR_GIRn],ipc_platform_get_host_buffer->[platform_shared_commit,ipc_get_drvdata],ipc_platform_complete_cmd->[imx_mu_xcr_rmw,platform_shared_commit,wait_for_interrupt,IMX_MU_xCR_GIEn,IMX_MU_xCR_GIRn],ipc_platform_poll_tx_host_msg->[imx_mu_xcr_rmw,mailbox_dspbox_write,imx_mu_read,platform_shared_commit,IMX_MU_xCR_GIRn],ipc_platform_poll_is_cmd_pending->[imx_mu_xcr_rmw,IMX_MU_xSR_GIPn,imx_mu_read,interrupt_clear,imx_mu_xsr_rmw,IMX_MU_xCR_GIEn],ipc_platform_poll_set_cmd_done->[IMX_MU_xCR_GIEn,imx_mu_xcr_rmw,IMX_MU_xCR_GIRn],ipc_platform_poll_is_host_ready->[imx_mu_xcr_rmw,IMX_MU_xSR_GIPn,imx_mu_read,interrupt_clear,imx_mu_xsr_rmw,IMX_MU_xCR_GIEn],ipc_platform_do_cmd->[platform_shared_commit,mailbox_validate,ipc_cmd,ipc_get],void->[imx_mu_xcr_rmw,IMX_MU_xSR_GIPn,imx_mu_read,interrupt_clear,ipc_schedule_process,imx_mu_xsr_rmw,tracev_ipc,IMX_MU_xCR_GIEn],platform_ipc_init->[imx_mu_xcr_rmw,rzalloc,dma_get,interrupt_register,ipc_set_drvdata,schedule_task_init_edf,panic,platform_shared_commit,trace_ipc_error,interrupt_enable,IMX_MU_xCR_GIEn,bzero]] | Interrupt handler for all IPC messages. function to handle the command and response of a specific task. | @tlauda do we have a real usecase where checking IPC bits is not enough? |
@@ -57,8 +57,7 @@ public abstract class AbstractPlayerRulesAttachment extends AbstractRulesAttachm
// maximum number of units that can be placed in each territory.
protected int maxPlacePerTerritory = -1;
- // It would wreck most map xmls to move the rulesAttachment's to another class, so don't move them out of here
- // please!
+ // It would wreck most map xmls to move the rulesAttachment's to another class, so don't move them out of here please!
// However, any new rules attachments that are not conditions, should be put into the "PlayerAttachment" class.
protected AbstractPlayerRulesAttachment(final String name, final Attachable attachable, final GameData gameData) {
super(name, attachable, gameData);
| [AbstractPlayerRulesAttachment->[setPlacementCapturedTerritory->[getBool],setPlacementAnyTerritory->[getBool],validate->[validateNames],setNegateDominatingFirstRoundAttack->[getBool],setMovementRestrictionType->[GameParseException,thisErrorMsg,equals],setDominatingFirstRoundAttack->[getBool],setPlacementInCapitalRestricted->[getBool],setUnlimitedProduction->[getBool],setMaxPlacePerTerritory->[getInt],setPlacementPerTerritory->[getInt],getCondition->[getPlayers,getPlayerId,get,log,contains],setPlacementAnySeaZone->[getBool],setMovementRestrictionTerritories->[splitOnColon,validateNames],getPropertyMap->[build],setProductionPerXTerritories->[thisErrorMsg,GameParseException,splitOnColon,getUnitType,getInt,put]]] | Gets the condition that should be used by the given player and condition name. Get all players in the system. | Might no longer be the case |
@@ -77,7 +77,7 @@ public class SampleTest {
ResultHandle nullHandle = method.loadNull();
BranchResult branch = method.ifNull(nullHandle);
branch.trueBranch().returnValue(branch.trueBranch().load("TRUE"));
- branch.falseBranch().returnValue(branch.trueBranch().load("FALSE"));
+ branch.falseBranch().returnValue(branch.falseBranch().load("FALSE"));
}
MyInterface myInterface = (MyInterface) cl.loadClass("com.MyTest").newInstance();
Assert.assertEquals("TRUE", myInterface.transform("TEST"));
| [SampleTest->[testIfNull->[loadNull,ifNull,build,returnValue,TestClassLoader,newInstance,getMethodCreator,getClassLoader,assertEquals,transform,load],testStringTransform->[ofMethod,getName,getMethodParam,build,returnValue,TestClassLoader,newInstance,transform,invokeStaticMethod,getMethodCreator,getClassLoader,assertEquals,invokeVirtualMethod,load],testSimpleGetMessage->[assertTrue,ofMethod,getName,returnValue,build,loadClass,TestClassLoader,newInstance,invokeStaticMethod,isSynthetic,getMethodCreator,getClassLoader,assertEquals,transform],testIfStatement->[ifNonZero,ofMethod,returnValue,getMethodParam,build,newInstance,TestClassLoader,transform,getMethodCreator,getClassLoader,assertEquals,invokeVirtualMethod,load]]] | Test if null. | This test error was never detected until scope checking was introduced. |
@@ -189,15 +189,15 @@ public class GcsUtil {
* @param globExp the glob expression to expand
* @return a string with the regular expression this glob expands to
*/
- public static String globToRegexp(String globExp) {
+ public static String wildcardToRegexp(String globExp) {
StringBuilder dst = new StringBuilder();
- char[] src = globExp.toCharArray();
+ char[] src = globExp.replace("**/*", "**").toCharArray();
int i = 0;
while (i < src.length) {
char c = src[i++];
switch (c) {
case '*':
- dst.append("[^/]*");
+ dst.append(".*");
break;
case '?':
dst.append("[^/]");
| [GcsUtil->[createBucket->[shouldRetry->[shouldRetry],createBucket,createBackOff],copy->[executeBatches],enqueueGetFileSize->[onSuccess->[create],onFailure->[create],getObject,getBucket],expand->[getGlobPrefix,isGlob,isGcsPatternSupported,globToRegexp],fileSizes->[getObjects],enqueueDelete->[getObject,getBucket],create->[getObject,create],getBucket->[shouldRetry->[shouldRetry],getBucket],bucketAccessible->[bucketAccessible,createBackOff],getObject->[getObject,createBackOff],remove->[executeBatches,makeRemoveBatches],GcsUtilFactory->[create->[GcsUtil]],getGlobPrefix->[isGcsPatternSupported],enqueueCopy->[copy,getObject,getBucket],listObjects->[createBackOff],open->[getObject]]] | This method converts a glob expression to a regular expression. | I'm not sure this is quite the right interpretation here. If I ask for `f???.txt` I probably don't intend for `f/g/.txt` to be captured in the regex. Thoughts? |
@@ -7,9 +7,6 @@
* https://www.openssl.org/source/license.html
*/
-#ifndef HEADER_BSS_FILE_C
-# define HEADER_BSS_FILE_C
-
# if defined(__linux) || defined(__sun) || defined(__hpux)
/*
* Following definition aliases fopen to fopen64 on above mentioned
| [No CFG could be retrieved] | Reads and parses a 32 - bit and returns it in the array h. generic file_ctrl wrapper. | You should adjust the indent of all preprocessor lines that were guarded. |
@@ -99,6 +99,14 @@ class Core_Command extends WP_CLI_Command {
$download_url = str_replace( '.zip', '.tar.gz', $offer['download'] );
}
+ $from_version = '';
+ $includes_folder = defined( 'WPINC' ) ? WPINC : 'wp-includes';
+ if ( file_exists( $download_dir . $includes_folder . '/version.php' ) ) {
+ global $wp_version;
+ require_once( $download_dir . $includes_folder . '/version.php' );
+ $from_version = $wp_version;
+ }
+
WP_CLI::log( sprintf( 'Downloading WordPress %s (%s)...', $version, $locale ) );
$cache = WP_CLI::get_cache();
| [Core_Command->[install->[_install],get_updates->[get_download_url],_multisite_convert->[get_error_message,tables,get_error_code],download->[getMessage,get_download_url,get_download_offer,has,import],_copy_overwrite_files->[isDir,getSubPathName],_extract->[extractTo,getFileName],update->[upgrade,get_error_code,get_download_url,get_updates],verify_checksums->[load_wordpress],_rmdir->[isDir,getRealPath],multisite_install->[_install,_multisite_convert],create_initial_blog->[set_permalink_structure,insert],check_update->[display_items,get_updates],multisite_convert->[_multisite_convert]]] | Downloads a WordPress file. Downloads a WordPress archive from the server. | When would `WP_INC` not be defined? |
@@ -474,9 +474,15 @@ def kernel(step_size, n_leapfrog_steps, x, target_log_prob_fn, event_dims=(),
acceptance_probs = math_ops.exp(math_ops.minimum(0., log_potential_0 -
log_potential_1 +
kinetic_0 - kinetic_1))
+
+ # If we are skipping the MH step directly return
+ if skip_metropolis_step:
+ return new_x, acceptance_probs, -log_potential_1, -grad_1
+
accepted = math_ops.cast(
random_ops.random_uniform(array_ops.shape(acceptance_probs)) <
acceptance_probs, np.float32)
+
new_log_prob = (-log_potential_0 * (1. - accepted) -
log_potential_1 * accepted)
| [leapfrog_integrator->[potential_and_grad],chain->[potential_and_grad,_make_potential_and_grad],kernel->[potential_and_grad,_make_potential_and_grad],leapfrog_step->[potential_and_grad]] | Creates a kernel of the Hamiltonian Monte Carlo with the given parameters. Returns the tensor of the last iteration of the sequence. Gradient of the n - node in the network. HMC - HMC with a single batch of means and log probabilities. | if skip_metropolis_step: return new_x, acceptance_probs, -log_potential_1, -grad_1 Then you dont need additional ops. |
@@ -1122,6 +1122,15 @@ class DeferredGroupBy(frame_base.DeferredFrame):
requires_partition_by=partitionings.Index(),
preserves_partition_by=partitionings.Singleton()))
+ aggregate = agg
+
+ first = last = head = tail = frame_base.not_implemented_method(
+ 'order sensitive')
+
+ __len__ = frame_base.wont_implement_method('non-deferred')
+ get_group = __getitem__ = frame_base.not_implemented_method('get_group')
+ groups = property(frame_base.wont_implement_method('non-deferred'))
+
def _liftable_agg(meth):
name, func = frame_base.name_and_func(meth)
| [_unliftable_agg->[wrapper->[groupby]],DeferredDataFrame->[nunique->[nunique],fillna->[fillna],join->[_cols_as_temporary_index,join,fill_placeholders,reindex,revert],set_index->[set_index],nsmallest->[nsmallest],dot->[AsScalar],rename->[rename],nlargest->[nlargest],mode->[mode],dropna->[dropna],corr->[corr],reset_index->[reset_index],replace->[replace],merge->[set_index,merge,columns],__setitem__->[__setitem__],sort_values->[sort_values],groupby->[groupby],unstack->[unstack],shift->[shift],quantile->[quantile]],DeferredGroupBy->[agg->[agg,DeferredDataFrame]],_liftable_agg->[wrapper->[groupby]],DeferredSeries->[nlargest->[nlargest],_corr_aligned->[std],dropna->[dropna],update->[update],fillna->[fillna],corr->[corr],nsmallest->[nsmallest],std->[compute_moments->[std]],unique->[unique],_cov_aligned->[compute_co_moments->[cov]],replace->[replace]],_liftable_agg,_unliftable_agg] | A function that aggregates the DataFrame into a new DataFrame by using the function fn. | Should we consider implementing this and `groups` for categorical grouping keys? |
@@ -330,6 +330,10 @@ class modProjet extends DolibarrModules
{
global $conf, $langs;
+ if (dolibarr_del_const($this->db, 'EXPENSEREPORT_PROJECT_IS_REQUIRED', $conf->entity) < 0) {
+ return 0;
+ }
+
// Permissions
$this->remove($options);
| [modProjet->[init->[trans,escape,remove,load,_init],__construct->[fetch_object,query]]] | Initialize the Dolphin template create a document model. | What is goal of this ? When disabling and enabling a module, we should retrieve the setup before disabling. No need for this. |
@@ -55,7 +55,13 @@ module Users
def cache_active_profile
cacher = Pii::Cacher.new(current_user, user_session)
- cacher.save(current_user.user_access_key)
+ begin
+ cacher.save(current_user.user_access_key)
+ rescue Pii::EncryptionError => _err
+ current_user.active_profile.deactivate
+ properties = { user_id: current_user.uuid }
+ analytics.track_event(Analytics::PROFILE_ENCRYPTION_INVALID, properties)
+ end
end
end
end
| [SessionsController->[track_authentication_attempt->[new,downcase,track_event,uuid,find_by,present?],create->[track_authentication_attempt],active->[headers,render,alive?,debug],cache_active_profile->[save,user_access_key,new],alive?->[present?],timeout->[track_event,redirect_to,t],now->[now],skip_before_action,before_action,after_action,include]] | cache_active_profile - returns the last N key for the current user. | This is a decryption event, not an encryption event, maybe change the event name? |
@@ -17,10 +17,10 @@ class H2OAutoML(object):
:param int nfolds: Number of folds for k-fold cross-validation. Defaults to 5. Use 0 to disable cross-validation; this will also
disable Stacked Ensemble (thus decreasing the overall model performance).
- :param bool balance_classes: Balance training data class counts via over/under-sampling (for imbalanced data). Defaults to ``false``.
- :param class_sampling_factors: Desired over/under-sampling ratios per class (in lexicographic order). If not specified, sampling
+ :param bool balance_classes: Balance training data class counts via over/under-sampling (for imbalanced data). Defaults to ``false``.
+ :param class_sampling_factors: Desired over/under-sampling ratios per class (in lexicographic order). If not specified, sampling
factors will be automatically computed to obtain class balance during training. Requires balance_classes.
- :param float max_after_balance_size: Maximum relative size of the training data after balancing class counts (can be less than 1.0).
+ :param float max_after_balance_size: Maximum relative size of the training data after balancing class counts (can be less than 1.0).
Requires ``balance_classes``. Defaults to 5.0.
:param int max_runtime_secs: This argument controls how long the AutoML run will execute. Defaults to 3600 seconds (1 hour).
:param int max_models: Specify the maximum number of models to build in an AutoML run. (Does not include the Stacked Ensemble model.)
| [H2OAutoML->[predict->[predict],download_pojo->[download_pojo],download_mojo->[download_mojo]]] | A function that automates the supervised machine learning model training process. This value defaults to 0. 0001 if the dataset is at least 1 million. | @deil87 Please limit cosmetic changes for PRs. Makes it hard to see the diff. |
@@ -119,6 +119,16 @@ class FactorRange(Range):
this categorical range.
""")
+ bound_lower = Either(String, Int, help="""
+ The minimum value that the range is allowed to go to - typically used to prevent
+ the user from panning/zooming/etc away from the data.
+ """)
+
+ bound_upper = Either(String, Int, help="""
+ The max value that the range is allowed to go to - typically used to prevent
+ the user from panning/zooming/etc away from the data.
+ """)
+
def __init__(self, *args, **kwargs):
if args and "factors" in kwargs:
raise ValueError("'factors' keyword cannot be used with positional arguments")
| [DataRange->[List,Instance],Range->[Instance],Range1d->[__init__->[super,ValueError,len],Either],FactorRange->[__init__->[list,super,ValueError],List,Float,Either],DataRange1d->[Float]] | Initialize a factor range with a sequence of factors. | I'm not 100% certain this functionality is useful for categorical ranges, I think `upper` and `lower` imply an ordering, which is not the case here. |
@@ -113,7 +113,9 @@ class ExportCsv extends ExportPlugin
public function exportHeader()
{
global $what, $csv_terminated, $csv_separator, $csv_enclosed, $csv_escaped;
-
+ //Enable columns names by default for CSV
+ if($what == 'csv')
+ $GLOBALS['csv_columns'] = 'yes';
// Here we just prepare some values for export
if ($what == 'excel') {
$csv_terminated = "\015\012";
| [ExportCsv->[exportData->[query,initAlias,numFields,fieldName,fetchRow,freeResult],setProperties->[addProperty,setText,setMimeType,setExtension,setOptions,setOptionsText],__construct->[setProperties]]] | Export header of . | Hi, @rijaspk. Thanks for your contribution. Before we can merge your pull request, we would like you to make the following changes. The body of the `if` structure must be enclosed by braces and there must be a space between the `if` keyword and the parenthesis. |
@@ -28,7 +28,9 @@ import ftplib
import urlparse
import scipy
from scipy import linalg
-
+import re
+
+
logger = logging.getLogger('mne') # one selection here used across mne-python
logger.propagate = False # don't propagate (in case of multiple imports)
| [ProgressBar->[update_with_increment_value->[update]],_chunk_read->[ProgressBar],_fetch_file->[_HTTPResumeURLOpener,_fetch_file,_chunk_read_ftp_resume,_chunk_read],set_config->[get_config_path],_chunk_write->[update_with_increment_value],get_config->[get_config_path],_TempDir->[__new__->[__new__]],set_log_file->[WrapStdOut],requires_scipy_version->[check_scipy_version],_chunk_read_ftp_resume->[ProgressBar],requires_nibabel->[has_nibabel],pformat->[_FormatDict],has_nibabel,make_skipper_dec,has_command_line_tools,has_freesurfer] | This function is a utility function that returns a random object from a sequence of integers. Generate chunks from a sequence of objects. | re is from the standard library hence should be imported before numpy which should be imported before scipy (as scipy depends on numpy) |
@@ -300,10 +300,13 @@ public final class HttpRequestSessionContext
ImmutableMap.Builder<String, SelectedRole> roles = ImmutableMap.builder();
parseProperty(servletRequest, PRESTO_ROLE).forEach((key, value) -> {
SelectedRole role;
- try {
- role = SelectedRole.valueOf(value);
+ Matcher m = ROLE_PATTERN.matcher(value);
+ if (m.matches()) {
+ SelectedRole.Type type = SelectedRole.Type.valueOf(m.group(1));
+ Optional<Name> roleName = Optional.ofNullable(m.group(3)).map(Name::createNonDelimitedName);
+ role = new SelectedRole(type, roleName);
}
- catch (IllegalArgumentException e) {
+ else {
throw badRequest(format("Invalid %s header", PRESTO_ROLE));
}
roles.put(key, role);
| [HttpRequestSessionContext->[parseProperty->[splitSessionHeader]]] | Parse the list of selected roles from the servlet request. | Don't abbreviate variable names. Just call it `matcher` |
@@ -99,6 +99,17 @@ class WikiTablesWorld(World):
def get_basic_types(self) -> Set[Type]:
return types.BASIC_TYPES
+ @overrides
+ def get_valid_actions(self) -> Dict[str, List[str]]:
+ valid_actions = super().get_valid_actions()
+
+ # We just need to add a few things here that don't get added by our world-general logic.
+
+ # This one is possible because of `reverse`.
+ valid_actions['e'].append('e -> [<r,e>, r]')
+ valid_actions['d'].append('d -> [<r,d>, r]')
+ return valid_actions
+
@overrides
def get_valid_starting_types(self) -> Set[Type]:
return types.BASIC_TYPES
| [WikiTablesWorld->[_get_numbers_from_tokens->[int,append,len,str,replace,float],__init__->[list,_get_numbers_from_tokens,_map_name,str,super,set,range],_map_name->[split,startswith,replace,ParsingError,_add_name_mapping,match]]] | Get basic types. | Why does the general logic not add these? |
@@ -223,6 +223,9 @@ func NewQueryable(distributor QueryableWithFilter, stores []QueryableWithFilter,
if err != nil {
return nil, err
}
+ //Take the set tenant limits
+ //TODO When Chunk Bytes per Query Limit is created take that in here (Currently Unlimited)
+ ctx = limiter.NewPerQueryLimiterOnContext(ctx, limits.MaxSeriesPerQuery(userID), 0)
mint, maxt, err = validateQueryTimeRange(ctx, userID, mint, maxt, limits, cfg.MaxQueryIntoFuture)
if err == errEmptyTimeRange {
| [UseQueryable->[UseQueryable],LabelNames->[LabelNames],Select->[Select],LabelValues->[LabelValues]] | ChunkQuerier creates a new queryable that can be used to query a chunk of limits - limits for the query. | Let's remove the todo. We agreed to do it in a separate PR. |
@@ -882,7 +882,7 @@ def _glyph_function(glyphclass, extra_docs=None):
kwargs['global_alpha'] = kwargs['alpha']
# handle the main glyph, need to process literals
- glyph_ca = _pop_colors_and_alpha(glyphclass, kwargs)
+ glyph_ca = _pop_visuals(glyphclass, kwargs)
incompatible_literal_spec_values = []
incompatible_literal_spec_values += _process_sequence_literals(glyphclass, kwargs, source, is_user_source)
incompatible_literal_spec_values += _process_sequence_literals(glyphclass, glyph_ca, source, is_user_source)
| [_process_axis_and_grid->[_get_axis_class,_get_num_minor_ticks],_handle_legend_field->[_find_legend_item],_update_legend->[_get_or_create_legend],_pop_colors_and_alpha->[get_default_color],_handle_legend_label->[_find_legend_item],_process_tools_arg->[_tool_from_string],_glyph_function->[func->[_make_glyph,_update_legend,_pop_colors_and_alpha,_pop_renderer_args,_process_sequence_literals,_pop_legend_kwarg],_add_sigfunc_info,_get_sigfunc,_get_argspecs]] | Create a function that creates a glyph object. Returns a function that renders the missing key n - tuple. | @solstag it was happening here, presumably |
@@ -310,6 +310,9 @@ exports.rules = [
// Accessing extension-location.calculateExtensionScriptUrl().
'extensions/amp-script/0.1/amp-script.js->' +
'src/service/extension-location.js',
+ // For registering action macros.
+ 'extensions/amp-action-macro/0.1/amp-action-macro.js->' +
+ 'src/service/action-impl.js',
],
},
{
| [No CFG could be retrieved] | Extension - specific methods. This is a list of all the javascript files that are required to be included in the poly. | We should use the standard cross-binary invocation mechanism i.e. services.js. Without it, `parseActionMap` will be duplicated in both v0.js and amp-action-macro-0.1.js binaries. |
@@ -45,6 +45,7 @@ class Admin_Menu {
add_action( 'admin_menu', array( $this, 'reregister_menu_items' ), 99999 );
add_action( 'admin_enqueue_scripts', array( $this, 'enqueue_scripts' ) );
+ add_action( 'admin_enqueue_scripts', array( $this, 'dequeue_scripts' ), 20 );
}
/**
| [Admin_Menu->[add_browse_sites_link->[get_connected_user_data],add_tools_menu->[migrate_submenus],add_site_card_menu->[is_wpcom_site],add_purchases_menu->[migrate_submenus],set_site_card_menu_class->[get_site_suffix],add_posts_menu->[migrate_submenus],add_plugins_menu->[is_wpcom_site,migrate_submenus],add_stats_menu->[is_wpcom_site],add_media_menu->[migrate_submenus],add_page_menu->[migrate_submenus],add_jetpack_menu->[is_wpcom_site,add_admin_menu_separator,migrate_submenus],add_users_menu->[migrate_submenus],reregister_menu_items->[add_browse_sites_link,add_tools_menu,add_site_card_menu,get_site_suffix,add_purchases_menu,add_posts_menu,add_plugins_menu,add_stats_menu,add_media_menu,add_page_menu,add_jetpack_menu,add_users_menu,add_options_menu,add_appearance_menu,is_wpcom_site,add_my_home_menu,add_comments_menu],add_appearance_menu->[migrate_submenus],add_my_home_menu->[migrate_submenus],jetpack_parent_file->[get_site_suffix],add_comments_menu->[migrate_submenus]]] | This method is called by the constructor of the class. | Just double-checking the script is still needed in the front-end? |
@@ -1596,7 +1596,7 @@ class ContentMapper implements ContentMapperInterface
$session = $this->getSession();
$node = $session->getNodeByIdentifier($uuid);
- $this->deleteRecursively($node, $dereference);
+ $this->deleteRecursively($node, $webspaceKey, $dereference);
$session->save();
}
| [ContentMapper->[deleteRecursively->[deleteRecursively],getStructure->[getStructure],getTitle->[getPropertyData],getShadowLocale->[createPropertyTranslator],rowToArray->[load,getResourceLocator,getStructure,initializeExtensionCache,createPropertyTranslator,getLocalizedUrlsForPage,getShadowLocale],copyOrMove->[copy,move,createPropertyTranslator,loadByNode,save],setChanger->[createPropertyTranslator],saveExtension->[createPropertyTranslator,save],loadExtensionData->[load],saveStartPage->[save],getRouteNode->[getRouteNode],loadTreeByNode->[loadTreeByNode],loadByParentNode->[loadByParentNode],save->[createPropertyTranslator,save],getSession->[getSession],delete->[save],getEnabledShadowLanguages->[createPropertyTranslator],getExtensionData->[save],loadStartPage->[load],loadBreadcrumb->[createPropertyTranslator,createSql2Query],restoreHistoryPath->[save,loadByResourceLocator],getConcreteLanguages->[getEnabledShadowLanguages],getContentNode->[getContentNode],orderBefore->[save,load,orderBefore],getUrl->[getPropertyData],loadByNode->[createPropertyTranslator,getConcreteLanguages,getEnabledShadowLanguages,load],loadInternalLinkDependencies->[load],getLocalizedUrlsForPage->[createPropertyTranslator],copyLanguage->[save,load]]] | Deletes a node in the tree. | The new event needs the webspace key .. |
@@ -219,6 +219,9 @@ const OPTIONS cms_options[] = {
OPT_R_OPTIONS,
OPT_V_OPTIONS,
+
+ OPT_PARAMETERS(),
+ {"cert", 0, 0, "Recipient certs, used when encrypting"},
{NULL}
};
| [No CFG could be retrieved] | Main entry point for the CMS - related module. This function returns the first element of the stack that is not null. | perhaps `Recipient certs (optional, required and used only when encrypting)` ? |
@@ -1,13 +1,7 @@
import React from 'react'
import { fbt } from 'fbt-runtime'
-import withWallet from 'hoc/withWallet'
-
-const DownloadApp = ({ walletType }) => {
- if (walletType === 'Origin Wallet') {
- return null
- }
-
+const DownloadApp = () => {
return (
<div className="onboard-help origin-wallet">
<div className="app-image mb-3">
| [No CFG could be retrieved] | Imports a single - type . | This didn't seem to be working. |
@@ -1,6 +1,8 @@
require 'rails_helper'
describe CategoriesController do
+ let!(:admin) { Fabricate(:admin) }
+ let!(:category) { Fabricate(:category, user: admin) }
context 'index' do
| [context,to,include,update_columns,let,describe,create_post,get,not_to,before,eq,sign_in,it,require,last] | handles the pagination of categories. | Do we need to create an admin for every single test? Using the bang version of `let` means we no longer lazy evaluate the block. |
@@ -195,7 +195,8 @@ public class <%= entityClass %>ResourceIntTest <% if (databaseType == 'cassandra
@Before
public void initTest() {<% if (databaseType == 'mongodb' || databaseType == 'cassandra') { %>
- <%= entityInstance %>Repository.deleteAll();<% } %>
+ <%= entityInstance %>Repository.deleteAll();<% } if (searchEngine == 'elasticsearch') { %>
+ <%= entityInstance %>SearchRepository.deleteAll();<% } %>
<%= entityInstance %> = new <%= entityClass %>();
<%_ for (idx in fields) { _%>
<%= entityInstance %>.set<%= fields[idx].fieldInJavaBeanMethod %>(<%='DEFAULT_' + fields[idx].fieldNameUnderscored.toUpperCase()%>);
| [No CFG could be retrieved] | Sets up the object that is not initialized by the test. create - method. | shouldn't this be handled by the `<%= entityInstance %>Repository.deleteAll();`
@@ -924,6 +924,7 @@ class Jetpack_CLI extends WP_CLI_Command {
'enqueue_wait_time' => 0,
'queue_max_writes_sec' => 10000,
'max_queue_size_full_sync' => 100000,
+ 'full_sync_send_duration' => HOUR_IN_SECONDS,
)
);
Settings::update_settings( $sync_settings );
| [Jetpack_CLI->[sync->[reset],publicize->[disconnect]]] | Syncs the table with the database. Reset all sync queues Manage sync settigns This function is called to initialize a full sync with a list of modules. Start a new full sync with WordPress. | Do we really want to do this for an hour? Seems like A LOT! |
@@ -66,6 +66,9 @@ public class ReconServer extends GenericCli {
@Override
public Void call() throws Exception {
OzoneConfiguration ozoneConfiguration = createOzoneConfiguration();
+ Configuration.addDeprecation(
+ ReconServerConfigKeys.OZONE_RECON_HTTP_KEYTAB_FILE_OLD,
+ ReconServerConfigKeys.OZONE_RECON_HTTP_KEYTAB_FILE);
ConfigurationProvider.setConfiguration(ozoneConfiguration);
injector = Guice.createInjector(new
| [ReconServer->[start->[start],join->[join],stop->[stop]]] | Initializes the Recon server. | Maybe we can add a method 'addConfigurationOverrides' in the ConfigurationProvider provider and move this logic there? When we have more deprecated keys in the future, it may be easier to maintain them there. |
@@ -119,7 +119,7 @@ public final class ResourceLeakDetector<T> {
* Returns {@code true} if resource leak detection is enabled.
*/
public static boolean isEnabled() {
- return getLevel().ordinal() > Level.DISABLED.ordinal();
+ return ENABLED;
}
/**
| [ResourceLeakDetector->[newRecord->[toString],DefaultResourceLeak->[toString->[toString],getLevel]]] | Checks if the log level is enabled. | I would be interested to know what profiling results show if `level` is made `final`. Also I'm not sure `ENABLED` can be `final` if `level` is not `final`...and if this is the case I wonder if it is worth adding another variable at all? Can we verify these assumptions with a benchmark? |
@@ -10,12 +10,7 @@ function getCsrfToken() {
return resolve(authToken);
} else if (i === 1000) {
clearInterval(waitingOnCSRF);
- var airbrake = new airbrakeJs.Client({projectId: 1, projectKey: '<%=ApplicationConfig["AIRBRAKE_API_KEY"]%>'});
- airbrake.addFilter(function(notice) {
- notice.context.environment = "<%= Rails.env %>";
- return notice;
- });
- airbrake.notify("Could no locate CSRF metatag" + JSON.stringify(localStorage.current_user));
+ Honeybadger.notify("Could not locate CSRF metatag" + JSON.stringify(localStorage.current_user));
return reject("Could not locate CSRF meta tag on the page.");
}
}, 5);
| [No CFG could be retrieved] | Get the token from the meta tag. | This probably needs a space at the end of the string to avoid joining two strings: `Could not locate CSRF meta tag ` |
@@ -477,4 +477,15 @@ Status appendLogTypeToJson(const std::string& log_type, std::string& log) {
}
return Status(0, "OK");
}
+
+void setAWSProxy(Aws::Client::ClientConfiguration& config) {
+ if (FLAGS_aws_enable_proxy) {
+ config.proxyScheme =
+ Aws::Http::SchemeMapper::FromString(FLAGS_aws_proxy_scheme.c_str());
+ config.proxyHost = FLAGS_aws_proxy_host;
+ config.proxyPort = FLAGS_aws_proxy_port;
+ config.proxyUserName = FLAGS_aws_proxy_username;
+ config.proxyPassword = FLAGS_aws_proxy_password;
+ }
+}
}
| [getInstanceIDAndRegion->[initAwsSdk],CreateHttpRequest->[CreateHttpRequest],getAWSRegion->[getAWSRegionFromProfile]] | Append log type to JSON and log log. | @fmanco if you have a suggestion about how to gate this from _outside_ of this function, I would be open to changing this logic. I looked at trying to add an optional argument to `makeAWSClient` that would support passing this in during client creation using this function, but it doesn't look like that's a great solution. thoughts? |
@@ -174,7 +174,7 @@ var forbiddenTerms = {
'cidServiceForDocForTesting': {
message: privateServiceFactory,
whitelist: [
- 'extensions/amp-analytics/0.1/cid-impl.js',
+ 'src/service/cid-impl',
],
},
'installCryptoService': {
| [No CFG could be retrieved] | Private services that should only be installed once. Private modules that provide a private service. | add back the missing `.js` will probably pass the presubmit check |
@@ -60,7 +60,8 @@ public abstract class AbstractAzureDataLakeStorageProcessor extends AbstractProc
.sensitive(true).build();
public static final PropertyDescriptor ACCOUNT_KEY = new PropertyDescriptor.Builder()
- .name("storage-account-key").displayName("Storage Account Key")
+ .name(STORAGE_ACCOUNT_KEY_PROPERTY_DESCRIPTOR_NAME)
+ .displayName("Storage Account Key")
.description("The storage account key. This is an admin-like password providing access to every container in this account. It is recommended " +
"one uses Shared Access Signature (SAS) token instead for fine-grained control with policies. " +
"There are certain risks in allowing the account key to be stored as a flowfile " +
| [AbstractAzureDataLakeStorageProcessor->[customValidate->[validateCredentialProperties]]] | The storage account name storage account key and SAS Token properties. | The property descriptor definition seems to be the same as in `AzureStorageUtils`. The same descriptors could be used here for the Account Key and SAS Token (maybe for Endpoint Override too). |
@@ -501,11 +501,13 @@ func (a *apiServer) GetLogs(request *pps.GetLogsRequest, apiGetLogsServer pps.AP
logBytes := scanner.Bytes()
msg := new(pps.LogMessage)
if err := jsonpb.Unmarshal(bytes.NewReader(logBytes), msg); err != nil {
+ msg.Message = string(logBytes)
select {
- case errCh <- err:
+ case logChs[i] <- msg:
case <-done:
+ return
}
- return
+ continue
}
// Filter out log lines that don't match on pipeline or job
| [DeletePipeline->[DeleteJob],AddShard->[pipelineWatcher,jobWatcher],jobManager->[scaleUpWorkers,numWorkers,workerServiceIP,updateJobState,InspectJob],CreatePipeline->[validatePipeline],pipelineWatcher->[setPipelineCancel,deletePipelineCancel],RestartDatum->[InspectJob],jobWatcher->[setJobCancel,deleteJobCancel],pipelineManager->[scaleDownWorkers,CreateJob,getRunningJobsForPipeline,watchJobCompletion],DeleteAll->[DeletePipeline,DeleteJob,ListJob,ListPipeline],CreateJob->[validateJob],GetLogs->[lookupRcNameForPipeline,GetLogs],watchJobCompletion->[InspectJob]] | GetLogs returns a list of logs from a given pipeline or job This function is used to get all logs from a given rc Scan reads the log file and sends it to the API server. | Per f2f discussion, just log the error in pachd before continuing |
@@ -96,7 +96,11 @@ def main():
binpath = binpath_from_arg(sys.argv[2])
# Make sure an env always has the conda symlink
try:
- conda.install.symlink_conda(join(binpath, '..'), conda.config.root_dir)
+ if len(binpath)>1 and sys.platform=='win32':
+ conda.install.symlink_conda(join(binpath[1], '..'), conda.config.root_dir)
+ else:
+ conda.install.symlink_conda(join(binpath, '..'), conda.config.root_dir)
+
except (IOError, OSError) as e:
if e.errno == errno.EPERM or e.errno == errno.EACCES:
sys.exit("Cannot activate environment {}, do not have write access to write conda symlink".format(sys.argv[2]))
| [binpath_from_arg->[prefix_from_arg],main->[binpath_from_arg,help],main] | Entry point for the command line interface. This function checks if the user provided a environment and if so checks if it has a con. | You can just use `binpath[-1]` everywhere. |
@@ -72,7 +72,7 @@ public class BackupManager implements EventHandler {
try {
localIp = InetAddress.getLocalHost().getHostAddress();
} catch (Exception e) {
- logger.warn("Get local ip failed.");
+ logger.warn("Getting local ip failed.");
}
for (String member : parameter.getBackupMembers()) {
| [BackupManager->[handleEvent->[setStatus],init->[setStatus],channelActivated->[init]]] | Initialize the keep alive. | "Failed to get local IP" for consistency.
@@ -364,8 +364,9 @@ class Archiver:
manager.export_paperkey(args.path)
else:
if not args.path:
- self.print_error("output file to export key to expected")
- return EXIT_ERROR
+ print("No output path provided. Printing key:")
+ manager.export_paperkey(None)
+ return EXIT_SUCCESS
try:
if args.qr:
manager.export_qr(args.path)
| [main->[get_args,run,Archiver],with_repository->[decorator->[wrapper->[argument]]],Archiver->[do_debug_search_repo_objs->[print_error,print_finding],_export_tar->[item_to_tarinfo->[print_warning,item_content_stream],build_filter,print_warning,build_matcher,item_to_tarinfo],do_prune->[print_error],do_mount->[print_error],do_check->[print_error],do_extract->[build_filter,print_warning,build_matcher],_list_archive->[_list_inner,build_matcher],do_delete->[print_error],run->[_setup_topic_debugging,prerun_checks,get_func,_setup_implied_logging],_rec_walk->[print_file_status,_process_any,print_warning,_rec_walk],do_debug_dump_archive->[output->[do_indent],output],_import_tar->[print_file_status,print_warning],do_recreate->[print_error,build_matcher],_process_any->[print_warning],do_key_export->[print_error],do_benchmark_crud->[measurement_run,test_files],_info_archives->[format_cmdline],do_config->[print_error,list_config],build_parser->[define_archive_filters_group->[add_argument],define_exclusion_group->[define_exclude_and_patterns],define_borg_mount->[define_archive_filters_group,add_argument,define_exclusion_group],add_common_group,define_archive_filters_group,CommonOptions,define_borg_mount,add_argument,process_epilog,define_exclusion_group],do_key_import->[print_error],CommonOptions->[add_common_group->[add_argument->[add_argument]]],do_diff->[build_matcher,print_warning],do_debug_dump_repo_objs->[decrypt_dump],parse_args->[get_func,resolve,preprocess_args,parse_args,build_parser],do_list->[print_error],do_create->[create_inner->[print_file_status,print_error,print_warning],create_inner],with_repository],main] | Export the key for backup. | maybe it would be even better without that "print", because then the stdout output is exactly the same as borg would write to the on-disk file when a file path has been provided. |
@@ -1906,7 +1906,7 @@ class Create(PTransform):
value: An object of values for the PCollection
"""
super(Create, self).__init__()
- if isinstance(value, string_types):
+ if isinstance(value, (unicode, str, bytes)):
raise TypeError('PTransform Create: Refusing to treat string as '
'an iterable. (string=%r)' % value)
elif isinstance(value, dict):
| [Map->[_fn_takes_side_inputs,FlatMap],CombineValuesDoFn->[process->[merge_accumulators,create_accumulator,add_inputs,extract_output,apply]],Impulse->[get_windowing->[Windowing],from_runner_api_parameter->[Impulse]],Flatten->[get_windowing->[Windowing],from_runner_api_parameter->[Flatten]],CombineValues->[to_runner_api_parameter->[_combine_payload],from_runner_api_parameter->[CombineValues],expand->[PostCombineFn->[add_input->[]],ParDo],make_fn->[from_callable]],Partition->[ApplyPartitionFnFn->[process->[partition_for]],expand->[PostCombineFn->[add_input->[]],ParDo],make_fn->[CallableWrapperPartitionFn]],Create->[get_windowing->[Windowing],get_output_type->[infer_output_type],expand->[PostCombineFn->[add_input->[]],get_output_type]],FlatMap->[ParDo,CallableWrapperDoFn],CombineFn->[add_inputs->[add_input],apply->[create_accumulator,extract_output,add_inputs]],CombineGlobally->[without_defaults->[with_defaults],as_singleton_view->[_clone],expand->[PostCombineFn->[add_input->[]],from_callable,typed,Map,add_input_types,apply],with_defaults->[_clone],with_fanout->[_clone]],GroupByKey->[from_runner_api_parameter->[GroupByKey],expand->[PostCombineFn->[add_input->[]],ParDo,ReifyWindows]],WindowInto->[to_runner_api_parameter->[to_runner_api],from_runner_api_parameter->[WindowInto,from_runner_api],__init__->[Windowing,WindowIntoFn]],_combine_payload->[get_accumulator_coder],DoFn->[get_function_arguments->[get_function_arguments],_DoFnParam],Windowing->[to_runner_api->[to_runner_api],from_runner_api->[Windowing,from_runner_api]],CallableWrapperCombineFn->[add_inputs->[union],merge_accumulators->[ReiterableNonEmptyAccumulators]],ProcessContinuation->[resume->[ProcessContinuation]],CombinePerKey->[to_runner_api_parameter->[_combine_payload],from_runner_api_parameter->[CombinePerKey],make_fn->[from_callable]],ParDo->[to_runner_api_parameter->[_pardo_fn_data],make_fn->[CallableWrapperDoFn],from_runner_api_parameter->[ParDo],infer_output_type->[infer_output_type],_process_argspec_
fn->[_process_argspec_fn]],_CombinePerKeyWithHotKeyFanout->[expand->[PostCombineFn->[add_input->[add_input,merge_accumulators]],PostCombineFn,CombinePerKey,Map,ParDo,PreCombineFn]],Filter->[FlatMap],CallableWrapperDoFn->[_inspect_process->[_process_argspec_fn],infer_output_type->[_strip_output_annotations]]] | Initializes a Create transform. | No need to check for both `str` and `bytes` since Python 2.7 defines `bytes == str` and on Python 3.X `unicode == str`. |
@@ -37,5 +37,9 @@ class MultiLabelRecognitionDataAnalyzer(BaseDataAnalyzer):
ignored_objects += data.multi_label == -1
for key in label_map:
print_info('{name}: {value}'.format(name=label_map[key], value=count[key]))
+ data_analyze[label_map[key]] = count[key]
for key in label_map:
print_info('ignored instances {name}: {value}'.format(name=label_map[key], value=ignored_objects[key]))
+ data_analyze['ignored_instances_' + label_map[key]] = ignored_objects[key]
+
+ return data_analyze
| [MultiLabelRecognitionDataAnalyzer->[analyze->[get,object_count,format,print_info,range,zeros_like]]] | Analyze the result of a query. | I think here should be stored list instead of each class value separately |
@@ -7,12 +7,12 @@
import copy
import os.path as op
-import matplotlib
import numpy as np
from numpy.testing import (assert_array_almost_equal, assert_array_equal,
assert_allclose, assert_equal)
import pytest
+import matplotlib.pyplot as plt
from mne.channels import (make_eeg_layout, make_grid_layout, read_layout,
find_layout)
| [test_find_layout->[_get_test_info],test_make_grid_layout->[_get_test_info]] | Package containing the values of a single object. Get test info from a single . | why do you need this? |
@@ -57,8 +57,13 @@ class UserAuthToken < ActiveRecord::Base
ips.uniq!
return false if ips.empty? # first login is never suspicious
- user_location = login_location(user_ip)
- ips.none? { |ip| user_location == login_location(ip) }
+ if user_location = login_location(user_ip)
+ ips.none? do |ip|
+ if location = login_location(ip)
+ distance(user_location, location) < SiteSetting.max_suspicious_distance_km
+ end
+ end
+ end
end
def self.generate!(user_id: , user_agent: nil, client_ip: nil, path: nil, staff: nil)
| [UserAuthToken->[lookup->[log],rotate!->[hash_token,log],is_suspicious->[login_location],generate!->[log]]] | creates a new token for a user if it doesn t exist yet. | if there are no locations shouldn't everything be suspicious? |
@@ -757,9 +757,9 @@ int do_dtls1_write(SSL *s, int type, const unsigned char *buf,
* first check if there is a SSL3_BUFFER still being written out. This
* will happen with non blocking IO
*/
- if (SSL3_BUFFER_get_left(wb) != 0) {
- OPENSSL_assert(0); /* XDTLS: want to see if we ever get here */
- return ssl3_write_pending(s, type, buf, len, written);
+ if (!ossl_assert(SSL3_BUFFER_get_left(wb) == 0)) {
+ SSLerr(SSL_F_DO_DTLS1_WRITE, ERR_R_INTERNAL_ERROR);
+ return 0;
}
/* If we have an alert to send, lets send it */
| [No CFG could be retrieved] | This function is called from the write_bytes function of the first record of type type. 0 - based version of header 0 - based version of MAC header 0 - based version of. | I gather we no longer want to keep the ssl3_write_pending() call? |
@@ -289,10 +289,7 @@ public abstract class NamespaceExtractionCacheManager
}
}
);
- if (!future.isDone()
- && !future.cancel(true)) { // Interrupt to make sure we don't pollute stuff after we've already cleaned up
- throw new ISE("Future for namespace [%s] was not able to be canceled", implDatum.name);
- }
+ future.cancel(true);
try {
latch.await();
}
| [NamespaceExtractionCacheManager->[removeNamespaceLocalMetadata->[cancelFuture],delete->[removeNamespaceLocalMetadata],schedule->[run->[run],schedule,getPostRunnable,NamespaceImplData],scheduleAndWait->[scheduleOrUpdate]]] | Cancels a future that will be cancelled if the future is done. | why remove the ISE throw here? |
@@ -508,7 +508,13 @@ func (mod *modContext) importResourceFromToken(tok string) string {
refPkgName := parts[0]
- modName := mod.tokenToResource(tok)
+ resName := ""
+ if refPkgName == "pulumi" && parts[1] == "providers" {
+ refPkgName, resName = parts[2], "Provider"
+ parts[0], parts[1], parts[2] = refPkgName, "", "Provider"
+ } else {
+ resName = mod.tokenToResource(tok)
+ }
rel, err := filepath.Rel(mod.mod, "")
contract.Assert(err == nil)
| [gen->[genHeader,add],genTypes->[addType,addTypeIf,genHeader,addResource,details,add],hasTypes->[details],importTypeFromToken->[tokenToModule],importResourceFromToken->[tokenToResource],genResource->[addType,genHeader,addResource,add,has],genFunction->[genHeader,addResource,genAwaitableType,addType],genPropertyConversionTables->[genHeader],genType->[genProperties,details,genTypeDocstring,typeString],addTypeIf->[add],genInit->[genHeader,hasTypes,submodulesExist,isEmpty],genHeader->[strings],addResource->[add],pyType->[pyType,tokenToResource],genConfig->[genHeader,addResource,addType],isEmpty->[isEmpty],typeString->[tokenToResource,details,has,add,tokenToType,typeString],gen,genPropertyConversionTables,genHeader,details,add] | importResourceFromToken imports a resource from a token. | Looks like bad merge with my change a few lines above. Happy to take this implementation, though, if you want to delete the lines above. |
@@ -682,6 +682,7 @@ class Brain(object):
with _qt_disable_paint(self.plotter):
with self._ensure_minimum_sizes():
self.show()
+ self._update()
@safe_event
def _clean(self):
| [Brain->[add_label->[_update,_iter_views,add_overlay],screenshot->[screenshot],_add_text_field->[_add_text_field],_configure_vertex_time_course->[_configure_mplcanvas,norm],toggle_interface->[_update],restore_user_scaling->[_update],set_time->[set_time_point],close->[close],_on_pick->[norm],remove_annotations->[_update,remove_overlay],enable_depth_peeling->[enable_depth_peeling],_add_volume_data->[_iter_views],clear_glyphs->[_update,_remove_vertex_glyph,_remove_label_glyph],_configure_label_time_course->[_configure_mplcanvas,plot_time_line,clear_glyphs],_update_glyphs->[_iter_views],show_view->[_update,update],_initialize_actions->[_load_icons],reset->[_update],add_annotation->[_update,_iter_views,add_overlay],update_lut->[update_overlay,update],_configure_tool_bar->[_initialize_actions,_add_text_field,_add_button],_advance->[toggle_playback],_configure_picking->[norm],reset_view->[_iter_views],_make_movie_frames->[set_time_interpolation,screenshot],__init__->[map,add_overlay,_LayeredMesh,update],remove_labels->[_update,remove_overlay],_configure_sliders->[_set_slider_style],_play->[toggle_playback],plot_time_line->[plot_time_line],_update->[update],add_data->[_update,update_overlay,update],apply_auto_scaling->[_update],show->[show],_configure_trace_mode->[_set_label_mode->[_update],_set_annot->[_update,_configure_vertex_time_course],_set_annot,_configure_vertex_time_course],set_time_point->[_update,add_overlay,update_overlay,norm],_update_auto_scaling->[update_lut],save_movie->[frame_callback->[show,update],show,toggle_interface,_save_movie],_update_fscale->[update_lut],add_foci->[_iter_views],_add_button->[_add_button]],_LayeredMesh->[_compose_overlays->[to_colors,_compute_over],update->[_compose_overlays,_update],update_overlay->[update],add_overlay->[to_colors,_compute_over,_Overlay]],_Overlay->[to_colors->[diff,norm]]] | Sets up the time viewer and the default keyboard shortcuts. Initialize the object with all the keys in the order they are selected. 
show a single remove all lighting and interactor objects from the plotter. | This will only happen if we call `setup_time_viewer`, which only happens if `time_viewer=True` (effectively) is used, right? This suggests that if `time_viewer=False` then the bug will still be there. It seems like we Probably need an equivalent change in very similar code in `_pyvista.py`, or some other fix, or maybe this belongs at the `Brain` level? |
@@ -107,6 +107,9 @@ class AttrSpec(HasProps):
super(AttrSpec, self).__init__(**properties)
+ if self.default is None and self.iterable is not None:
+ self.default = next(copy(iter(self.iterable)))
+
@staticmethod
def _ensure_list(attr):
"""Always returns a list with the provided value. Returns the value if a list."""
| [AttrSpec->[_create_attr_map->[_generate_items,_ensure_tuple,_setup_iterable],set_columns->[_setup_default,_ensure_list],__getitem__->[setup,_ensure_tuple],setup->[_create_attr_map,set_columns]],color->[ColorAttr],cat->[CatAttr],marker->[MarkerAttr]] | Create a lazy evaluated attribute specification. | You don't need to copy here, do you? This breaks in 2.7. If you remove this, tests should pass.
@@ -131,7 +131,7 @@ class Video extends Component<Props> {
render() {
return (
<video
- autoPlay = { true }
+ autoPlay = { !config.testing.noAutoPlayVideo }
className = { this.props.className }
id = { this.props.id }
ref = { this._setVideoElement } />
| [No CFG could be retrieved] | Creates a component with a video element if the video element is attached to it. Removes the association between the component s video element and the video track. | I think for the reactified files we tend to get config from redux. |
@@ -298,9 +298,10 @@ func (t *timestampOracle) getTS(leadership *election.Leadership, count uint32) (
// ResetTimestamp is used to reset the timestamp in memory.
func (t *timestampOracle) ResetTimestamp() {
+ t.tsoMux.Lock()
+ defer t.tsoMux.Unlock()
log.Info("reset the timestamp in memory")
- zero := &atomicObject{
+ t.tso = &tsoObject{
physical: typeutil.ZeroTime,
}
- atomic.StorePointer(&t.tso, unsafe.Pointer(zero))
}
| [saveTimestamp->[getTimestampPath],UpdateTimestamp->[saveTimestamp],SyncTimestamp->[saveTimestamp,loadTimestamp],ResetUserTimestamp->[saveTimestamp],loadTimestamp->[getTimestampPath]] | ResetTimestamp resets the timestamp in memory. | how about set to nil directly? |
@@ -236,7 +236,7 @@ metadata = dict(
"numba.annotations": ["*.html"],
# Various test data
"numba.cuda.tests.cudadrv.data": ["*.ptx"],
- "numba.hsa.tests.hsadrv": ["*.brig"],
+ "numba.roc.tests.hsadrv": ["*.brig"],
"numba.tests": ["pycc_distutils_usecase/*.py"],
# Some C files are needed by pycc
"numba": ["*.c", "*.h"],
| [find_packages->[rec->[rec],rec],get_ext_modules,find_packages,is_building] | The metadata for the numba package. | I don't think we are still shipping any .brig files. There is one unit test, but it references a file under `/opt/rocm`.
@@ -9,3 +9,13 @@ from __future__ import (absolute_import, division, generators, nested_scopes, pr
def combined_dict(*dicts):
"""Combine one or more dicts into a new, unified dict (dicts to the right take precedence)."""
return {k: v for d in dicts for k, v in d.items()}
+
+
+def recursively_update(dict, dict2):
+ """dict.update but which merges child dicts (dict2 takes precedence where there's conflict)."""
+ for k, v in dict2.items():
+ if k in dict:
+ if isinstance(v, type(dict)):
+ recursively_update(dict[k], v)
+ continue
+ dict[k] = v
| [combined_dict->[items]] | Combine one or more dicts into a new unified dict. | one quick thought here is that you're stomping on the `dict` builtin with your local variable naming here, which may be confusing if/when anyone needs to update this function in the future. if you used e.g. `d` or `dict_` etc I think that'd be cleaner and you could avoid the `type(dict)` here in favor of direct use of `dict` as type. |
@@ -0,0 +1,16 @@
+/*
+Copyright (c) 2018 Uber Technologies, Inc.
+
+This source code is licensed under the MIT license found in the
+LICENSE file in the root directory of this source tree.
+*/
+/* eslint-env node */
+/* eslint-disable flowtype/require-valid-file-annotation */
+
+module.exports = {
+ TABS_EXAMPLE: 'Tabs',
+ STATEFUL_TABS_EXAMPLE: 'Stateful Tabs',
+ VERTICAL_TABS_EXAMPLE: 'Vertical Tabs',
+ STYLE_PROPS_OVERRIDES: 'Stateful tabs with overrides',
+ CONTROLLED_TABS_EXAMPLE: 'Externally controlled tabs',
+};
| [No CFG could be retrieved] | No Summary Found. | do you mind adding the two examples below? - tabs with no content (controlled in parent component) - vertical tabs with no overrides |
@@ -1139,6 +1139,14 @@ class ReviewUnlisted(ReviewBase):
subject = u'Mozilla Add-ons: %s %s signed and ready to download'
self.log_action(amo.LOG.APPROVE_VERSION)
+ if self.human_review:
+ if self.version.needs_human_review:
+ self.version.update(needs_human_review=False)
+
+ VersionReviewerFlags.objects.filter(
+ version=self.version
+ ).update(needs_human_review_by_mad=False)
+
self.notify_email(template, subject, perm_setting=None)
log.info(u'Making %s files %s public' %
| [ReviewUnlisted->[approve_latest_version->[set_files,sign_files,notify_email,log_action],block_multiple_versions->[set_files,log_action],confirm_multiple_versions->[log_action]],ReviewHelper->[set_data->[set_data]],ReviewerQueueTable->[render_addon_name->[increment_item]],ViewUnlistedAllListTable->[render_authors->[safe_substitute],render_guid->[safe_substitute],render_addon_name->[increment_item,safe_substitute]],ModernAddonQueueTable->[render_addon_name->[_get_addon_name_url]],ReviewBase->[approve_content->[log_action],approve_latest_version->[set_files,notify_email,sign_files,set_recommended,set_addon,unset_past_needs_human_review,log_action],process_comment->[log_action],confirm_auto_approved->[unset_past_needs_human_review,log_action],process_super_review->[log_action],reject_latest_version->[set_files,set_addon,notify_email,log_action],notify_about_auto_approval_delay->[notify_email],reject_multiple_versions->[set_files,notify_email,log_action]]] | Set an unlisted addon version files to public. | Might be a different issue but in `block_multiple_versions`, shouldn't we also clear the `VersionReviewerFlags`? |
@@ -694,7 +694,8 @@ public final class ExpressionFormatter
.toString();
}
- public String visitGroupingOperation(GroupingOperation node, Void context)
+ @Override
+ protected String visitGroupingOperation(GroupingOperation node, Void context)
{
return "GROUPING (" + joinExpressions(node.getGroupingColumns()) + ")";
}
| [ExpressionFormatter->[sortItemFormatterFunction->[formatExpression],Formatter->[visitSymbolReference->[formatIdentifier],visitLambdaArgumentDeclaration->[formatExpression]],formatGroupBy->[formatExpression]]] | Generates a String representation of a QuantifiedComparisonExpression. | i also changed the method to protected, hoping no-one will notice |
@@ -254,11 +254,14 @@ public class GcsUtil {
* exists.
*/
public List<GcsPath> expand(GcsPath gcsPattern) throws IOException {
- checkArgument(isGcsPatternSupported(gcsPattern.getObject()));
Pattern p = null;
String prefix = null;
- if (!isGlob(gcsPattern)) {
- // Not a glob.
+ if (isWildcard(gcsPattern)) {
+ // Part before the first wildcard character.
+ prefix = getNonWildcardPrefix(gcsPattern.getObject());
+ p = Pattern.compile(wildcardToRegexp(gcsPattern.getObject()));
+ } else {
+ // Not a wildcard.
try {
// Use a get request to fetch the metadata of the object, and ignore the return value.
// The request has strong global consistency.
| [GcsUtil->[createBucket->[shouldRetry->[shouldRetry],createBucket,createBackOff],copy->[executeBatches],enqueueGetFileSize->[onSuccess->[create],onFailure->[create],getObject,getBucket],expand->[getGlobPrefix,isGlob,isGcsPatternSupported,globToRegexp],fileSizes->[getObjects],enqueueDelete->[getObject,getBucket],create->[getObject,create],getBucket->[shouldRetry->[shouldRetry],getBucket],bucketAccessible->[bucketAccessible,createBackOff],getObject->[getObject,createBackOff],remove->[executeBatches,makeRemoveBatches],GcsUtilFactory->[create->[GcsUtil]],getGlobPrefix->[isGcsPatternSupported],enqueueCopy->[copy,getObject,getBucket],listObjects->[createBackOff],open->[getObject]]] | Finds all objects matching the given pattern. | maybe also rename `getGlobPrefix` to something like "getNonWildcardPrefix`? |
@@ -19,7 +19,12 @@ namespace Content.Server.GameObjects.Components.Explosion
public override string Name => "FlashExplosive";
private float _range;
+
private float _duration;
+ public float Duration{
+ get => _duration;
+ private set => _duration = value;
+ }
private string _sound;
private bool _deleteOnFlash;
| [FlashExplosiveComponent->[Trigger->[Explode],OnDestroy->[Explode],ExposeData->[ExposeData]]] | Override this method to expose the data of the specified object. | Just use a property with public getter and private setter. |
@@ -43,5 +43,4 @@ class RunnerIntegrationTest(PantsRunIntegrationTest):
},
})
self.assert_success(non_warning_run)
- self.assertNotIn('DEPRECATED', non_warning_run.stderr_data)
self.assertNotIn('test warning', non_warning_run.stderr_data)
| [RunnerIntegrationTest->[test_warning_filter->[_deprecation_warning_cmdline]]] | Checks that the warning is not a test warning and that the warning is not a test warning. | Should loop back to fix this. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.