patch
stringlengths 18
160k
| callgraph
stringlengths 4
179k
| summary
stringlengths 4
947
| msg
stringlengths 6
3.42k
|
|---|---|---|---|
@@ -595,8 +595,10 @@ namespace Kratos
array_1d<double, 3> b = ZeroVector(3);
b[0] = 1.0;
- const array_1d<double, 3> c = MathUtils<double>::CrossProduct(a, b);
- const array_1d<double, 3> d = MathUtils<double>::UnitCrossProduct(a, b);
+ array_1d<double, 3> c, d;
+
+ MathUtils<double>::CrossProduct(c, b, a);
+ MathUtils<double>::UnitCrossProduct(d, b, a);
KRATOS_CHECK_EQUAL(c[2], 2.0);
KRATOS_CHECK_EQUAL(d[2], 1.0);
|
[mat11->[mat11],ZeroMatrix->[ZeroMatrix],mat33->[mat33],IdentityMatrix->[IdentityMatrix],ZeroVector->[ZeroVector]]
|
Kratos test case in suite.
|
I assumed that for CrossProduct the values were inverted as well... Is that right?
|
@@ -22,8 +22,13 @@
For internal use only; no backwards-compatibility guarantees.
"""
+from __future__ import absolute_import
+
import sys
import traceback
+from builtins import next
+from builtins import object
+from builtins import zip
import six
|
[PerWindowInvoker->[invoke_process->[_find_param_with_default],__init__->[ArgPlaceholder]],_OutputProcessor->[finish_bundle_outputs->[receive],process_outputs->[receive]],DoFnRunner->[finish->[_invoke_bundle_method],process->[invoke_process,enter,exit],__init__->[create_invoker,DoFnSignature,LoggingContext],start->[_invoke_bundle_method],_invoke_bundle_method->[enter,exit]],DoFnSignature->[__init__->[MethodWrapper]]]
|
Creates a NameContext object that holds the name information for a given object. Returns a hash of the object.
|
I think we should we avoid `import six` for consistency with the approach followed elsewhere. What do you think, @RobbeSneyders ? Looks like we are using `six.reraise` in a few places and `six.text_type` in apiclient.py.
|
@@ -25,13 +25,16 @@ module View
def should_render_revenue?
revenue = @tile.revenue_to_render
+ # special case: city with multi-revenue - no choice but to draw separate revenue
+ return true if revenue.any? { |r| !r.is_a?(Numeric) }
+
return false if revenue.empty?
return false if revenue.first.is_a?(Numeric) && (@tile.cities + @tile.towns).one?
return false if revenue.uniq.size > 1
- return false if @tile.cities.sum(&:slots) < 3 && @tile.stops.size == 2
+ return false if @tile.cities.sum(&:slots) < 3 && (@tile.cities + @tile.towns).size == 2
true
end
|
[Tile->[render->[render_tile_part,should_render_revenue?]]]
|
Renders a single top - level object. if there is a node with a mismatch in any order add it to the list of children.
|
we call cities + towns . size a lot, maybe make a helper method on tiles
|
@@ -947,6 +947,7 @@ out:
D_ERROR("pool "DF_UUID" event %d failed: rc %d\n",
DP_UUID(svc->ps_uuid), src, rc);
daos_prop_fini(&prop);
+#endif
}
static void
|
[No CFG could be retrieved]
|
Get the number of objects from the rdb. dss_ult_create - creates the ult_arg object.
|
This will be removed.
|
@@ -160,6 +160,11 @@ func (r *routeBuilder) profileHandler() (request.Handler, error) {
return middleware.Wrap(h, backendMiddleware(r.cfg, r.authenticator, r.ratelimitStore, profile.MonitoringMap)...)
}
+func (r *routeBuilder) firehoseLogHandler() (request.Handler, error) {
+ h := firehose.Handler(r.batchProcessor, r.authenticator)
+ return middleware.Wrap(h, firehoseMiddleware(r.cfg, intake.MonitoringMap)...)
+}
+
func (r *routeBuilder) backendIntakeHandler() (request.Handler, error) {
requestMetadataFunc := emptyRequestMetadata
if r.cfg.AugmentEnabled {
|
[profileHandler->[Wrap,Handler],backendIntakeHandler->[Wrap,Handler,BackendProcessor],rumIntakeHandler->[Wrap,Compile,Handler],sourcemapHandler->[Wrap,Handler],rootHandler->[Wrap,Handler],Handle,HandlerFunc,TimeoutMiddleware,RequestTimeMiddleware,ResponseHeadersMiddleware,rumIntakeHandler,Wrap,handlerFn,NewHandler,CORSMiddleware,NewServeMux,RecoverPanicMiddleware,HTTPHandler,KillSwitchMiddleware,Infof,NewLogger,backendAgentConfigHandler,MonitoringMiddleware,AuthMiddleware,NewContextPool,AnonymousRateLimitMiddleware,rumAgentConfigHandler,LogMiddleware]
|
profileHandler returns a middleware that serves the profile and intake requests.
|
nit: `firehoseLogHandler` vs. `firehoseMiddleware` looks like a naming inconsistency? (`log` is not used anywhere else).
|
@@ -416,7 +416,8 @@ class GradeEntryFormsController < ApplicationController
end
end
- #If the request is a post type and the abort flag is down (operation can continue)
+ # If the request is a post type and the abort flag is down
+ # (operation can continue)
if request.post? && !abort_upload
grades_file = params[:upload][:grades_file]
begin
|
[GradeEntryFormsController->[new->[new],create->[new]]]
|
upload a single with a CSV file if there is a grade entry with this id redirect to the view.
|
Trailing whitespace detected.
|
@@ -40,6 +40,8 @@ import org.sonar.plugins.java.api.semantic.Symbol;
public class BytecodeCompleter implements JavaSymbol.Completer {
+ public static final int ASM_API_VERSION = Opcodes.ASM7_EXPERIMENTAL;
+
private Symbols symbols;
private final ParametrizedTypeCache parametrizedTypeCache;
private final SquidClassLoader classLoader;
|
[BytecodeCompleter->[getEnclosingClass->[getClassSymbol,inputStreamFor],formFullName->[formFullName],loadClass->[getClassSymbol],getClassSymbol->[getClassSymbol],constantValue->[visitField->[visitField]]]]
|
Imports the given package and class. Register a class in the class map.
|
Confirmed the issue on SQ side.
|
@@ -359,6 +359,8 @@ function annotate(fn, strictDi, name) {
* * {@link auto.$provide#service service(class)} - registers a **constructor function**, `class`
* that will be wrapped in a **service provider** object, whose `$get` property will instantiate
* a new object using the given constructor function.
+ * * {@link auto.$provide#decorator decorator(name)} - registers a **service decorator** with the
+ * {@link auto.$injector $injector}, `name` the name of the service to decorate.
*
* See the individual methods for more information and examples.
*/
|
[No CFG could be retrieved]
|
Provides a method to get the instance of a specific object. This method is used to provide a way to get the current service s result.
|
service decorator is not correct, as not only services can be decorated. You should call it **decorator function**. In the function signature, the decorator fn is also missing: decorator(name, decoratorFn)
|
@@ -158,6 +158,8 @@ def get_supported(versions=None, noarch=False):
abis.append('none')
+ arches = []
+
if not noarch:
arch = get_platform()
if sys.platform == 'darwin':
|
[get_impl_tag->[get_impl_ver,get_abbr_impl],get_abi_tag->[get_impl_ver,get_flag,get_abbr_impl,get_config_var],get_impl_ver->[get_abbr_impl,get_config_var],get_platform->[get_platform],get_supported->[get_platform,get_abi_tag,get_impl_version_info,get_abbr_impl],get_flag->[get_config_var],get_config_var->[get_config_var],get_impl_version_info->[get_abbr_impl],get_impl_tag,get_supported]
|
Returns a list of supported tags for each version. Returns a list of all possible unknown - block chains.
|
Renaming this variable `arch` and flipping the values/logic in the surrounding code would make this a bit less confusing. Double negatives FTL where avoidable
|
@@ -641,8 +641,10 @@ public class DoFnOperator<InputT, OutputT>
@Override
public final void processElement(StreamRecord<WindowedValue<InputT>> streamRecord) {
checkInvokeStartBundle();
+ long oldHold = keyCoder != null ? keyedStateInternals.minWatermarkHoldMs() : -1L;
doFnRunner.processElement(streamRecord.getValue());
checkInvokeFinishBundleByCount();
+ emitWatermarkIfHoldChanged(oldHold);
}
@Override
|
[DoFnOperator->[FlinkTimerInternals->[deleteTimerInternal->[onFiredOrDeletedTimer],deleteTimer->[deleteTimer,cancelPendingTimerById],onFiredOrDeletedTimer->[timerUsesOutputTimestamp],currentSynchronizedProcessingTime->[currentProcessingTime],currentProcessingTime->[currentProcessingTime],processPendingProcessingTimeTimers->[fireTimerInternal,checkInvokeStartBundle],currentInputWatermarkTime->[getEffectiveInputWatermark],setTimer->[setTimer],registerTimer->[onNewSdfTimer,onNewEventTimer]],onEventTime->[checkInvokeStartBundle],addSideInputValue->[addSideInputValue],createWrappingDoFnRunner->[setForWindow->[setForWindow]],MultiOutputOutputManagerFactory->[buildTaggedKvCoder->[TaggedKvCoder],create->[create]],close->[close],notifyCheckpointComplete->[notifyCheckpointComplete],dispose->[dispose],processWatermark2->[processWatermark1],onProcessingTime->[checkInvokeStartBundle],open->[getDoFn,createWrappingDoFnRunner],prepareSnapshotPreBarrier->[invokeFinishBundle],TaggedKvCoder->[decode->[decode],verifyDeterministic->[verifyDeterministic],encode->[encode]],processElement->[processElement],emitAllPushedBackData->[processElement],processWatermark1->[getEffectiveInputWatermark],initializeState->[initializeState],processElement2->[addSideInputValue],snapshotState->[invokeFinishBundle,snapshotState],setup->[setup]]]
|
Process element in ready windows.
|
Is this required on every element? I'd rather trigger this only if we set / remove a hold.
|
@@ -81,7 +81,7 @@ feature 'LOA1 Single Sign On' do
it 'user can view and confirm personal key during sign up', :js do
allow(FeatureManagement).to receive(:prefill_otp_codes?).and_return(true)
user = create(:user, :with_phone)
- code = '1234'
+ code = 'ABC1-DEF2-GHI3-JKL4'
stub_personal_key(user: user, code: code)
loa1_sp_session
|
[sign_in_and_require_viewing_personal_key->[visit,env,login_as,on_next_request],enter_personal_key_words_on_modal->[fill_in],stub_personal_key->[to,instance_double,and_return],visit,email,password,create,let,to_not,new_user_session_url,feature,join,it,fill_in_credentials_and_submit,enter_personal_key_words_on_modal,friendly_name,to,have_content,stub_personal_key,before,click_button,click_link,t,require,click,login_two_factor_path,include,have_link,sign_up_user_from_sp_without_confirming_email,sign_in_user,set_rack_session,from_issuer,click_on,match,perform_in_browser,have_css,context,within,sign_in_and_require_viewing_personal_key,uuid,eq,confirm_email_in_a_different_browser,sign_up_start_url,and_return]
|
clicks user s start page without accordion and redirects user to SP after session timeout.
|
Again, Poltergeist didn't catch the fact that this field has a regex pattern requirement.
|
@@ -1208,9 +1208,9 @@ def regularize(cov, info, mag=0.1, grad=0.1, eeg=0.1, exclude='bads',
channel type separately. Special care is taken to keep the
rank of the data constant.
- Note. This function is kept for reasons of backwards-compatibility.
- Please consider explicitly using the `method` parameter in
- compute_covariance to directly combine estimation with regularization
+ **Note:** This function is kept for reasons of backward-compatibility.
+ Please consider explicitly using the ``method`` parameter in
+ `compute_covariance` to directly combine estimation with regularization
in a data-driven fashion.
Parameters
|
[make_ad_hoc_cov->[Covariance],_get_covariance_classes->[_ShrunkCovariance->[fit->[fit]],_RegCovariance->[fit->[fit,Covariance]]],_undo_scaling_array->[_apply_scaling_array],write_cov->[save],Covariance->[__iadd__->[_check_covs_algebra],__add__->[_check_covs_algebra]],_estimate_rank_meeg_signals->[_apply_scaling_array,_undo_scaling_array],_estimate_rank_meeg_cov->[_undo_scaling_cov,_apply_scaling_cov],_gaussian_loglik_scorer->[_logdet],compute_covariance->[_check_n_samples,_get_tslice,Covariance,_unpack_epochs],read_cov->[Covariance],_get_whitener_data->[compute_whitener],compute_whitener->[prepare_noise_cov],_auto_low_rank_model->[_cross_val],_undo_scaling_cov->[_apply_scaling_cov],prepare_noise_cov->[_get_ch_whitener],compute_raw_data_covariance->[Covariance,_check_n_samples]]
|
Regularize the covariance matrix of a single node. Missing N - block block - index. Compute the missing values in the covariance matrix.
|
maybe add a link to the FAQ section.
|
@@ -828,13 +828,13 @@ func (b *cloudBackend) runEngineAction(
<-displayDone
scope.Close() // Don't take any cancellations anymore, we're shutting down.
close(engineEvents)
- close(displayEvents)
close(displayDone)
contract.IgnoreClose(manager)
// Make sure that the goroutine writing to displayEvents and callerEventsOpt
// has exited before proceeding
<-eventsDone
+ close(displayEvents)
status := apitype.UpdateStatusSucceeded
if err != nil {
|
[CloudConsoleURL->[CloudURL],CreateStack->[CreateStack],runEngineAction->[Close,Refresh,Update,Destroy],tryNextUpdate->[CloudURL],GetStack->[GetStack],CancelCurrentUpdate->[GetStack],GetLatestConfiguration->[GetLatestConfiguration],EncryptValue->[EncryptValue],ListStacks->[ListStacks],DecryptValue->[DecryptValue],Read->[Read],Logout->[CloudURL],createAndStartUpdate->[Name],apply->[cloudConsoleStackPath,CloudConsoleURL,createAndStartUpdate],Close->[Close],GetLogs->[GetStack],DownloadPlugin->[DownloadPlugin],getCloudStackIdentifier->[Name],GetStack,Read]
|
runEngineAction runs an update action on the cloud provider. Update updates the state of an object.
|
shouldn't pulling on displayDone be done after pulling on eventsDone... ?
|
@@ -10,11 +10,16 @@ from six.moves import socketserver #type: ignore # pylint: disable=import-erro
import josepy as jose
import OpenSSL
+import pytest
from acme import errors
from acme import test_util
from acme.magic_typing import List # pylint: disable=unused-import, no-name-in-module
+# turns all ResourceWarnings into errors for this module
+if six.PY3:
+ pytestmark = pytest.mark.filterwarnings("ignore::ResourceWarning") # pragma: no cover
+
class SSLSocketAndProbeSNITest(unittest.TestCase):
"""Tests for acme.crypto_util.SSLSocket/probe_sni."""
|
[MakeCSRTest->[test_make_csr_must_staple->[_call_with_key],test_make_csr->[_call_with_key]],DumpPyopensslChainTest->[test_dump_pyopenssl_chain_wrapped->[_call],test_dump_pyopenssl_chain->[_call]],PyOpenSSLCertOrReqSANTest->[test_csr_idn_sans->[_call_csr,_get_idn_names],test_csr_hundred_sans->[_call_csr],test_csr_two_sans->[_call_csr],test_cert_no_sans->[_call_cert],_call_csr->[_call],test_cert_two_sans->[_call_cert],test_cert_idn_sans->[_get_idn_names,_call_cert],test_csr_no_sans->[_call_csr],test_csr_one_san->[_call_csr],test_csr_six_sans->[_call_csr],test_critical_san->[_call_cert],_call_cert->[_call],test_cert_hundred_sans->[_call_cert]],PyOpenSSLCertOrReqAllNamesTest->[test_cert_one_san_no_common->[_call_cert],test_cert_two_sans_yes_common->[_call_cert],_call_cert->[_call],test_cert_no_sans_yes_common->[_call_cert]],SSLSocketAndProbeSNITest->[test_probe_not_recognized_name->[_start_server],test_probe_ok->[_probe,_start_server],setUp->[_TestServer->[server_bind->[server_bind]],_TestServer]]]
|
Tests for acme. crypto_util. Test for the presence of a specific ethernet address.
|
Doesn't this do the opposite and ignores all `ResourceWarnings` in this module?
|
@@ -30,6 +30,9 @@ public interface MessageQueueWriter {
void write(List<RawMessageEvent> entries) throws MessageQueueException;
+ default void preFlightCheck() throws PreflightCheckException {
+ }
+
@AutoValue
abstract class Metrics {
public static class Provider implements javax.inject.Provider<MessageQueueWriter.Metrics> {
|
[Metrics->[Provider->[get->[build]],builder->[Builder]]]
|
Write a list of message events to the queue.
|
What do you think about moving this out of the interface? I think the implementation is inconsistent with other pre-flight checks. The MongoDB and Elasticsearch services don't implement checks this way, and potential plugin checks do neither.
|
@@ -55,11 +55,11 @@ class Options:
self.fast_parser = False
self.incremental = False
- def __eq__(self, other):
+ def __eq__(self, other: Any) -> bool:
return self.__class__ == other.__class__ and self.__dict__ == other.__dict__
- def __ne__(self, other):
+ def __ne__(self, other: Any) -> bool:
return not self == other
- def __repr__(self):
+ def __repr__(self) -> str:
return 'Options({})'.format(pprint.pformat(self.__dict__))
|
[Options->[__repr__->[pformat,'Options]]]
|
Initialize the object with default values.
|
This should actually be `object`.
|
@@ -303,7 +303,7 @@ feature 'Sign Up', devise: true do
# When I resend confirmation instructions to an existing user
# Then the user does not receive an email
context 'confirmation instructions sent to existing user', email: true do
- it 'does not send an email to the existing user' do
+ xit 'does not send an email to the existing user' do
user = create(:user)
visit '/'
|
[email,visit,have_title,password,create,phone,days,be,to_not,feature,current,it,freeze,to,trigger,have_content,before,click_button,click_link,scenario,t,require,signin,have_link,update,user_confirmation_url,each,hours,have_css,fill_in,context,sign_up_with,eq,and_return]
|
Updates a user s confirmation token. expects confirmation token.
|
this 'resend email confirmation' link is not currently in designs, hence the removal -- will discuss with ryan t and figure out where we can slide it...
|
@@ -94,6 +94,10 @@ class Dataset:
meta_name = self._config.get('dataset_meta')
if meta_name:
meta_name = Path(meta_name)
+ print_info("{dataset_name} dataset metadata will be saved to {file}".format(
+ dataset_name=self._config['name'], file=meta_name))
+ print_info('Converted annotation for {dataset_name} dataset will be saved to {file}'.format(
+ dataset_name=self._config['name'], file=Path(annotation_name)))
save_annotation(annotation, meta, Path(annotation_name), meta_name)
self._annotation = annotation
|
[create_subset->[make_subset],DatasetWrapper->[size->[__len__],reset->[reset],make_subset->[make_subset]],Dataset->[size->[__len__],reset->[_load_annotation],provide_data_info->[set_annotation_metadata],__init__->[DatasetConfig]]]
|
Load the annotation and meta from the config.
|
Do you check path representation, when you print it? If I not mistaken, it will be printed like PosixPath('path')
|
@@ -43,10 +43,17 @@ class Package extends BasePackage
protected $stability;
protected $notificationUrl;
+ /**
+ * @var Link[]
+ */
protected $requires = array();
protected $conflicts = array();
protected $provides = array();
protected $replaces = array();
+
+ /**
+ * @var Link[]
+ */
protected $devRequires = array();
protected $suggests = array();
protected $autoload = array();
|
[Package->[getSourceUrls->[getUrls],getDistUrls->[getUrls]]]
|
Creates a new package from a given version. Constructor for .
|
this should also be done for other properties (conflicts, provides and replaces are also package links)
|
@@ -113,6 +113,11 @@ public class RegisterIsoCmd extends BaseCmd {
description = "true if ISO should bypass Secondary Storage and be downloaded to Primary Storage on deployment")
private Boolean directDownload;
+ @Parameter(name = ApiConstants.PASSWORD_ENABLED,
+ type = CommandType.BOOLEAN,
+ description = "true if password reset feature is supported; default is false")
+ private Boolean passwordEnabled;
+
/////////////////////////////////////////////////////
/////////////////// Accessors ///////////////////////
/////////////////////////////////////////////////////
|
[RegisterIsoCmd->[execute->[setResponseName,getCommandName,setResponses,setResponseObject,createIsoResponses,ServerApiException,registerIso],getEntityOwnerId->[finalyzeAccountId,getId],getLogger,getName]]
|
Returns true if the node is bootable.
|
@anuragaw let's export this via the template response (listIsos response should return this key) and we're golden!
|
@@ -710,6 +710,8 @@ redo_accept:
X509_free(signer);
X509_STORE_free(store);
X509_VERIFY_PARAM_free(vpm);
+ if (rsign_sigopts != NULL)
+ sk_OPENSSL_STRING_free(rsign_sigopts);
EVP_PKEY_free(key);
EVP_PKEY_free(rkey);
X509_free(cert);
|
[No CFG could be retrieved]
|
Reads the OCSP header and returns the response. Handle OCSP responses.
|
The NULL check isn't necesary.
|
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from spack import *
+
class PyGrpcio(PythonPackage):
"""HTTP/2-based RPC framework."""
|
[PyGrpcio->[setup_build_environment->[prepend_path,set,dependencies],patch->[filter_file,satisfies],depends_on,version]]
|
Creates a new object with the version number and description. Return a list of version numbers for all sequence numbers. - - - - - - - - - - - - - - - - - -.
|
You'll need another blank line after this to get the flake8 and isort checks to pass.
|
@@ -4381,9 +4381,9 @@ class ConfigProviders(Config):
curProvObj = sorted_providers[curProvider]
curProvObj.enabled = curEnabled
if curEnabled:
- providers_reordered.insert(0, curProvider)
+ enabled_providers.append(curProvider)
else:
- providers_reordered.append(curProvider)
+ disabled_providers.append(curProvider)
except:
continue
|
[LogoutHandler->[prepare->[redirect]],ConfigSubtitles->[index->[render,ConfigMenu],saveSubtitles->[redirect]],HomePostProcess->[index->[render],processEpisode->[redirect,_genericMessage]],ConfigAnime->[index->[render,ConfigMenu],saveAnime->[redirect]],History->[index->[render],clearHistory->[redirect],trimHistory->[redirect]],WebRoot->[toggleScheduleDisplayPaused->[redirect],schedule->[render],toggleDisplayShowSpecials->[redirect],setHomeLayout->[redirect],setScheduleSort->[redirect],setHistoryLayout->[redirect],apibuilder->[render,titler],index->[redirect],setScheduleLayout->[redirect]],HomeNews->[index->[render]],ConfigSearch->[index->[render,ConfigMenu],saveSearch->[redirect]],ConfigNotifications->[index->[render,ConfigMenu],saveNotifications->[redirect]],HomeIRC->[index->[render]],ConfigProviders->[index->[render,ConfigMenu],saveProviders->[index,redirect]],HomeChangeLog->[index->[render]],ErrorLogs->[index->[render,ErrorLogsMenu],clearerrors->[redirect],viewlog->[render],submit_errors->[redirect]],Config->[index->[render,ConfigMenu]],WebHandler->[prepare->[async_call]],Manage->[massUpdate->[refreshShow,redirect,updateShow],episodeStatuses->[render],failedDownloads->[render,redirect],backlogShow->[redirect],manageTorrents->[render],massEdit->[render,redirect],massEditSubmit->[redirect,editShow],backlogOverview->[render],subtitleMissed->[render],changeEpisodeStatuses->[redirect,setStatus],index->[render],downloadSubtitleMissed->[redirect]],ConfigBackupRestore->[index->[render,ConfigMenu]],ConfigPostProcessing->[index->[render,ConfigMenu],savePostProcessing->[redirect]],ConfigGeneral->[generateApiKey->[generateApiKey],index->[render,ConfigMenu],saveGeneral->[redirect]],LoginHandler->[checkAuth->[render,redirect],prepare->[async_call]],HomeAddShows->[addNewShow->[finishAddShow->[redirect,newShow],finishAddShow,redirect,sanitizeFileName],addExistingShows->[redirect,split_extra_show,newShow],popularShows->[render],getTrendingShows->[render],recommendedShows->[render]
,existingShows->[render],massAddTable->[render],addTraktShow->[sanitizeFileName,redirect],getRecommendedShows->[render],sanitizeFileName->[sanitizeFileName],index->[render],trendingShows->[render],addShowToBlacklist->[redirect],newShow->[render]],ManageSearches->[forceSearch->[redirect],pauseBacklog->[redirect],forceBacklog->[redirect],forceFindPropers->[redirect],index->[render]],BaseHandler->[render->[render_string]],Home->[doRename->[redirect,_genericMessage],updateCheck->[redirect],testRename->[render,_genericMessage],getManualSearchStatus->[getEpisodes],_genericMessage->[render],update->[render,redirect,_genericMessage,update],deleteEpisode->[deleteEpisode,redirect,_genericMessage],shutdown->[redirect,_genericMessage],restart->[render,redirect,_genericMessage],status->[render],updateEMBY->[redirect],retryEpisode->[_getEpisode],togglePause->[redirect,_genericMessage],editShow->[render,redirect,_genericMessage],refreshShow->[redirect,_genericMessage],searchEpisode->[_getEpisode],setStatus->[redirect,_genericMessage],updateKODI->[redirect],subtitleShow->[redirect,_genericMessage],displayShow->[_genericMessage,haveKODI,haveEMBY,render,titler],deleteShow->[redirect,_genericMessage],updatePLEX->[redirect],branchCheckout->[redirect,update],updateShow->[redirect,updateShow,_genericMessage],setSceneNumbering->[_getEpisode],index->[render],searchEpisodeSubtitles->[_getEpisode]]]
|
Save the newznab and torrent providers to the database. This function is called from the constructor of the newznabProvider class. It is add a new provider to the list of available providers and update it with the new one.
|
@echel0n this line means that providers which a lower preference become the most preferred when saving providers.
|
@@ -70,7 +70,7 @@ final class PageTrashSubscriber implements EventSubscriberInterface
return;
}
- $this->trashManager->store(BasePageDocument::RESOURCE_KEY, $event->getDocument());
+ $this->trashManager->store(BasePageDocument::RESOURCE_KEY, $document);
$this->hasPendingTrashItem = true;
}
|
[PageTrashSubscriber->[flushTrashItem->[flush],storePageToTrash->[store,getDocument]]]
|
Stores the page to trash.
|
Can you describe this change?
|
@@ -134,14 +134,14 @@ vos_imem_strts_create(struct vos_imem_strts *imem_inst)
rc = d_uhash_create(0 /* no locking */, VOS_POOL_HHASH_BITS,
&imem_inst->vis_pool_hhash);
if (rc) {
- D_ERROR("Error in creating POOL ref hash: %d\n", rc);
+ D_ERROR("Error in creating POOL ref hash: "DF_RC"\n", DP_RC(rc));
goto failed;
}
rc = d_uhash_create(D_HASH_FT_EPHEMERAL, VOS_CONT_HHASH_BITS,
&imem_inst->vis_cont_hhash);
if (rc) {
- D_ERROR("Error in creating CONT ref hash: %d\n", rc);
+ D_ERROR("Error in creating CONT ref hash: "DF_RC"\n", DP_RC(rc));
goto failed;
}
|
[vos_bio_addr_free->[vea_free,bio_addr_is_hole,D_ASSERT,vos_byte2blkcnt,vos_byte2blkoff,umem_free,D_ERROR],int->[obj_tree_register,bio_xsctxt_alloc,vos_ilog_init,vos_obj_tab_register,vos_dtx_cos_register,bio_nvme_init,vos_cont_tab_register,dbtree_class_register,D_ERROR,vos_dtx_table_register],vos_fini->[vos_fini_locked,D_MUTEX_UNLOCK,D_MUTEX_LOCK,D_ASSERT,gc_wait],inline->[vos_obj_cache_destroy,d_uhash_destroy,vos_imem_strts_destroy,vos_obj_cache_create,D_ERROR,d_uhash_create],vos_tls_get->[dss_tls_get,dss_module_key_get],vos_get_obj_cache->[vos_tls_get],vos_init->[vos_mod_init,D_GOTO,ABT_init,strcasecmp,vos_fini_locked,D_MUTEX_UNLOCK,D_MUTEX_LOCK,vos_tls_init,getenv,ABT_finalize,D_INFO,vos_nvme_init],void->[D_FREE,bio_xsctxt_free,vos_tls_fini,bio_nvme_fini,d_list_empty,umem_fini_txd,vos_nvme_fini,D_ASSERT,D_INIT_LIST_HEAD,ABT_finalize,vos_imem_strts_destroy,D_ALLOC_PTR,umem_init_txd,vos_imem_strts_create]]
|
vos_imem_strts_create - create the object cache and hash tables.
|
(style) line over 80 characters
|
@@ -74,7 +74,10 @@ public abstract class BaseTopNAlgorithm<DimValSelector, DimValAggregateStore, Pa
{
boolean hasDimValSelector = (dimValSelector != null);
- final int cardinality = params.getCardinality();
+ int cardinality = params.getCardinality();
+ if (cardinality < 0) {
+ cardinality = Integer.MAX_VALUE;
+ }
int numProcessed = 0;
while (numProcessed < cardinality) {
final int numToProcess;
|
[BaseTopNAlgorithm->[makeResultBuilder->[getThreshold,getAggregatorSpecs,getTime,getComparator,getDimensionSpec,getResultBuilder,getPostAggregatorSpecs],AggregatorArrayProvider->[build->[fill,computeStartEnd]],makeAggregators->[factorize,size],BaseArrayProvider->[computeStartEnd->[getThreshold,of,max,lookupId,min,getDimensionsFilter],skipTo->[dimensionValuesSorted],getValueCardinality,UnsupportedOperationException],run->[updateResults,reset,getCardinality,min,updateDimValSelector,getNumValuesPerPass,closeAggregators,scanAndAggregate,makeDimValSelector,makeDimValAggregateStore,computeNewLength],makeBufferAggregators->[factorizeBuffered,size]]]
|
This method runs the algorithm until a is found.
|
Why this change should be a part of this PR? cardinality < 0 probably means that completely different approach should be taken to compute topN, setting just to `Integer.MAX_VALUE` may result in a lot of excessive work and allocations in `run()` method.
|
@@ -1,3 +1,15 @@
+''' A categorical bar chart illustrating the use of `factor_cmap <factor_cmap>`_ to associate colors from a palette
+with categories.
+
+.. rubric:: Details
+
+:bokeh APIs: :func:`~bokeh.plotting.Figure.vbar`, :func:`~bokeh.transform.factor_cmap`
+:references: :ref:`userguide_categorical_bars`
+:keywords: bar, vbar, legend, factor_cmap, palette
+
+|
+
+'''
from bokeh.io import output_file, show
from bokeh.models import ColumnDataSource
from bokeh.palettes import Spectral6
|
[ColumnDataSource,vbar,show,output_file,dict,factor_cmap,figure]
|
Plot a bar showing the fruit counts.
|
@hyles-lineata, what's this marker for?
|
@@ -97,8 +97,8 @@ define([
});
}
- var vertexShaderText = vertexShaderSource.createCombinedVertexShader();
- var fragmentShaderText = fragmentShaderSource.createCombinedFragmentShader();
+ var vertexShaderText = vertexShaderSource.createCombinedVertexShader(this._context.webgl2);
+ var fragmentShaderText = fragmentShaderSource.createCombinedFragmentShader(this._context.webgl2);
var keyword = vertexShaderText + fragmentShaderText + JSON.stringify(attributeLocations);
var cachedShader;
|
[No CFG could be retrieved]
|
Creates a new shader program based on the provided options. The object that holds the count of the n - ary keyword.
|
It is slightly cleaner to just pass `context` through and only access `context.webgl2` in `combineShader`. Also this lets us access any other properties of `context` in the future.
|
@@ -28,15 +28,10 @@ import org.mockito.junit.jupiter.MockitoExtension;
class SwingActionTest {
private static final Object VALUE = new Object();
- private static final Runnable RUNNABLE_THROWING_EXCEPTION =
- () -> {
- throw new IllegalStateException();
- };
+ private static Object throwException() {
+ throw new IllegalStateException();
+ }
- private static final Supplier<?> SUPPLIER_THROWING_EXCEPTION =
- () -> {
- throw new IllegalStateException();
- };
@Mock private Runnable action;
@Mock private ActionEvent event;
@Mock private ActionListener listener;
|
[SwingActionTest->[testInvokeAndWait_ShouldRethrowActionUncheckedExceptionWhenCalledOnEdt->[invokeAndWait,assertThrows],testInvokeAndWait_ShouldInvokeActionWhenCalledOffEdt->[invokeAndWait,run],testInvokeAndWaitResult_ShouldRethrowActionUncheckedExceptionWhenCalledOffEdt->[invokeAndWaitResult,assertThrows],testInvokeNowOrLater->[ofSeconds,assertTimeoutPreemptively,CountDownLatch,invokeNowOrLater],testInvokeAndWait_ShouldInvokeActionWhenCalledOnEdt->[invokeAndWait,assertDoesNotThrow,run],testInvokeAndWaitResult_ShouldRethrowActionUncheckedExceptionWhenCalledOnEdt->[invokeAndWait,invokeAndWaitResult,assertThrows],testInvokeAndWait_ShouldRethrowActionUncheckedExceptionWhenCalledOffEdt->[invokeAndWait,assertThrows],testInvokeAndWaitResult_ShouldReturnActionResultWhenCalledOffEdt->[assertEquals,invokeAndWaitResult],testActionOf->[getValue,assertEquals,of,actionPerformed],testInvokeAndWaitResult_ShouldReturnActionResultWhenCalledOnEdt->[assertDoesNotThrow,get,invokeAndWaitResult,set,invokeAndWait,assertEquals],testKeyReleaseListener->[keyReleaseListener,mock,accept,keyReleased],IllegalStateException,Object]]
|
This method is used to test if a key has an action. Test method for key release listener.
|
Any thoughts to inline this directly? I think that might be a bit more direct.
|
@@ -237,7 +237,13 @@ class ChannelGraph(object):
if not valid_timeout:
continue
- yield Route(path, channel)
+ if channel.network_state == NODE_NETWORK_UNKNOWN:
+ online_nodes.append(Route(path, channel))
+
+ if channel.network_state == NODE_NETWORK_REACHABLE:
+ unknown_nodes.append(Route(path, channel))
+
+ return online_nodes + unknown_nodes
def has_path(self, source_address, target_address):
""" True if there is a connecting path regardless of the number of hops. """
|
[ChannelGraph->[has_path->[has_path],get_best_routes->[get_shortest_paths],__init__->[make_graph]]]
|
Yields the best routes for a given amount of blocks. Yields a route if there is a path to a node with the given address.
|
Here it seems to me that it should be the other way around. If the `state` is `UNKNOWN` then it should be added to `unknown_nodes` and if `REACHABLE` to `online_nodes`, right?
|
@@ -126,7 +126,11 @@ func sendConfigToChannel(configurationChan chan<- types.ConfigMessage, configura
}
func loadFileConfig(filename string) (*types.Configuration, error) {
- configuration := new(types.Configuration)
+ configuration := &types.Configuration{
+ Frontends: make(map[string]*types.Frontend),
+ Backends: make(map[string]*types.Backend),
+ TLSConfiguration: make([]*tls.Configuration, 0),
+ }
if _, err := toml.DecodeFile(filename, configuration); err != nil {
return nil, fmt.Errorf("error reading configuration file: %s", err)
}
|
[watcherCallback->[LoadConfig,Errorf,Stat,Debugf],Provide->[LoadConfig,Dir,addWatcher],addWatcher->[Go,NewWatcher,Close,Errorf,Split,Add],Join,IsDir,Name,Warnf,ReadDir,Errorf,DecodeFile,HasSuffix]
|
watcherCallback is the callback that is called when a file is watched and the configuration is sent loadContentConfiguration loads the content configuration from the specified directory.
|
Not needed because it's not a pointer to a slice.
|
@@ -935,6 +935,11 @@ function elgg_view_entity_icon(ElggEntity $entity, $size = 'medium', $vars = arr
* @return string/false Rendered annotation
*/
function elgg_view_annotation(ElggAnnotation $annotation, array $vars = array(), $bypass = true, $debug = false) {
+
+ if (!$annotation || !($annotation instanceof ElggAnnotation)) {
+ return false;
+ }
+
global $autofeed;
$autofeed = true;
|
[elgg_view_list_item->[getType],elgg_view_entity->[getType,getSubtype],elgg_view_river_item->[getSubjectEntity,getView,getObjectEntity],elgg_view_entity_icon->[getType,getSubtype],elgg_view_entity_annotations->[getType],elgg_view_menu->[getMenu,getSelected],elgg_view_comments->[getType,getSubtype]]
|
View an annotation.
|
FYI with instanceof you don't need a separate check for bool/object. It doesn't throw errors.
|
@@ -62,6 +62,8 @@ namespace DynamoSandbox
internal class Program
{
+ private static readonly SettingsMigrationWindow migrationWindow = new SettingsMigrationWindow();
+
private static void MakeStandaloneAndRun(string commandFilePath, out DynamoViewModel viewModel)
{
var geometryFactoryPath = string.Empty;
|
[Program->[Main->[Exit,Execute,IsCrashing,Message,Empty,WriteLine,NotifyCrash,LogException,OnRequestsCrashPrompt,StackTrace,MakeStandaloneAndRun,Length],PreloadShapeManager->[PreloaderLocation,GeometryFactoryPath,Version221,Version220,Preload,Location,GetDirectoryName,Version219],MakeStandaloneAndRun->[Start,PreloadShapeManager,Run,Empty]],PathResolver->[Empty]]
|
This method creates a standalone DynamoView and runs it.
|
This will be created regardless of migration actually happening, can we `new` only in `SettingsMigrationEventArgs.EventStatusType.Begin`?
|
@@ -374,3 +374,7 @@ def sum_order_totals(qs):
zero = Money(0, currency=settings.DEFAULT_CURRENCY)
taxed_zero = TaxedMoney(zero, zero)
return sum([order.total for order in qs], taxed_zero)
+
+
+def get_valid_shipping_methods(order: Order):
+ return applicable_shipping_methods(order, price=order.get_subtotal().gross.amount)
|
[order_needs_automatic_fullfilment->[order_line_needs_automatic_fulfillment],cancel_fulfillment->[update_order_status],update_order_prices->[recalculate_order],automatically_fulfill_digital_lines->[order_line_needs_automatic_fulfillment,fulfill_order_line]]
|
Sum all order totals in a queryset.
|
What do we gain from having this function? It doesn't do any additional logic. Also, we already have a function with the same name for checkout.
|
@@ -10,10 +10,11 @@ class PyGraphqlCore(PythonPackage):
for APIs created by Facebook."""
homepage = "https://github.com/graphql-python/graphql-core"
- url = "https://github.com/graphql-python/graphql-core/archive/v3.1.2.tar.gz"
+ pypi = "graphql-core/graphql-core-3.1.5.tar.gz"
- version('3.1.2', sha256='16087360d34f9cfa295b401fc17f9f11bcddef0e6e0dc5a694bbe2298b31949b')
- version('3.0.5', sha256='88021f8b879f18cf56523644e51e1552b126a9ad9ab218f579bf503d236d5272')
+ version('3.1.2', sha256='c056424cbdaa0ff67446e4379772f43746bad50a44ec23d643b9bdcd052f5b3a')
+ version('3.0.5', sha256='51f7dab06b5035515b23984f6fcb677ed909b56c672152699cca32e03624992e')
+ version('2.3.2', sha256='aac46a9ac524c9855910c14c48fc5d60474def7f99fd10245e76608eba7af746')
- depends_on('[email protected]:3.999', type=('build', 'run'))
+ depends_on('[email protected]:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
|
[PyGraphqlCore->[depends_on,version]]
|
Creates a list of all unique - element identifiers.
|
Can you undo this change, the `setup.py` contains: `python_requires=">=3.6,<4"`
|
@@ -76,6 +76,7 @@ struct obj_auxi_args {
* ec_in_recov -- a EC recovery task
*/
uint32_t io_retry:1,
+ io_task_reinited:1,
args_initialized:1,
to_leader:1,
spec_shard:1,
|
[No CFG could be retrieved]
|
The following functions are used to populate the object object and the object s target object. - - - - - - - - - - - - - - - - - -.
|
Hmm, reinit is called somewhere else as well, but reinited = 1 is only set here. Probably io_retried is clearer here. Or am I missing something?
|
@@ -7025,6 +7025,14 @@ spa_async_thread(void *arg)
!spa_feature_is_enabled(dp->dp_spa, SPA_FEATURE_RESILVER_DEFER)))
dsl_resilver_restart(dp, 0);
+ if (tasks & SPA_ASYNC_INITIALIZE_RESTART) {
+ mutex_enter(&spa_namespace_lock);
+ spa_config_enter(spa, SCL_CONFIG, FTAG, RW_READER);
+ vdev_initialize_restart(spa->spa_root_vdev);
+ spa_config_exit(spa, SCL_CONFIG, FTAG);
+ mutex_exit(&spa_namespace_lock);
+ }
+
/*
* Let the world know that we're done.
*/
|
[No CFG could be retrieved]
|
Remove all devices from the cache. function to handle async and async threads.
|
Mismerge? This seems to be a duplicate.
|
@@ -305,9 +305,13 @@ class Checkout:
self._add_to_user_address_book(
self.billing_address, is_billing=True)
- shipping_price = (
- self.shipping_method.get_total() if self.shipping_method
- else Price(0, currency=settings.DEFAULT_CURRENCY))
+ if self.shipping_method:
+ shipping_price = self.shipping_method.get_total_price()
+ else:
+ shipping_price = TaxedMoney(
+ net=Money(0, currency=settings.DEFAULT_CURRENCY),
+ gross=Money(0, currency=settings.DEFAULT_CURRENCY))
+
order_data = {
'language_code': get_language(),
'billing_address': billing_address,
|
[load_checkout->[func->[from_storage,for_storage]],Checkout->[billing_address->[_get_address_from_storage],create_order->[_save_order_shipping_address,_save_order_billing_address,_add_to_user_address_book,is_shipping_required],is_shipping_required->[is_shipping_required],shipping_address->[_get_address_from_storage],deliveries->[is_shipping_required],recalculate_discount->[_get_voucher]]]
|
Create an order from the checkout session. This method is called when a cart is not in the cart.
|
Why do we intentionally discard the net price here?
|
@@ -74,7 +74,7 @@ ica.apply(epochs) # clean data, default in place
evoked = [epochs[k].average() for k in event_ids]
-contrast = evoked[1] - evoked[0]
+contrast = combine_evoked(evoked, weights=[-1, 1]) # Faces - scrambled
evoked.append(contrast)
|
[plot_overlay,plot_events,create_eog_epochs,filter,show_view,make_inverse_operator,make_forward_solution,resample,show,dict,compute_covariance,convert_forward_solution,apply,evoked,epochs,make_field_map,plot,append,read_raw_ctf,print,find_events,data_path,read_source_spaces,find_bads_eog,plot_components,average,setup_source_space,set_time,isfile,apply_inverse,ICA,Epochs,plot_scores,pick_types]
|
Plot the events to get an idea of the paradigm. Visualize fields on the helmetal object.
|
actually, I find the original code much clearer in this instance.
|
@@ -80,6 +80,17 @@ def _raise_did_you_mean(address_family: AddressFamily, name: str, source=None) -
raise resolve_error from source
raise resolve_error
+@rule
+async def find_build_file(address: Address) -> BuildFileAddress:
+ address_family = await Get[AddressFamily](Dir(address.spec_path))
+ if address not in address_family.addressables:
+ _raise_did_you_mean(address_family=address_family, name=address.target_name)
+ return next(
+ build_file_address
+ for build_file_address in address_family.addressables.keys()
+ if build_file_address == address
+ )
+
@rule
async def hydrate_struct(address_mapper: AddressMapper, address: Address) -> HydratedStruct:
|
[_hydrate->[ResolvedTypeMismatchError],hydrate_struct->[consume_dependencies->[maybe_consume],collect_inline_dependencies->[maybe_append],_raise_did_you_mean,consume_dependencies,collect_inline_dependencies],addresses_with_origins_from_address_families->[_raise_did_you_mean],address_origin_map->[AddressOriginMap]]
|
Raise an exception if a is not found in the namespace. Recursively collect all the dependencies of a given object and hydrate them into a list of chains. Get a HydratedStruct with a HydratedStruct with a HydratedDependency.
|
So, this works, but we should take a look at how it is being used, and see whether it should be batched. The parsing will be memoized obviously, but each usage is O(N) in the number of targets in a directory.
|
@@ -242,14 +242,14 @@ void menu_temperature() {
EDIT_ITEM_FAST(percent, MSG_EXTRA_FAN_SPEED_1, &thermalManager.new_fan_speed[0], 3, 255);
#endif
#endif
- #if HAS_FAN1 || (ENABLED(SINGLENOZZLE) && EXTRUDERS > 1)
+ #if HAS_FAN1 || (DISABLED(SINGLENOZZLE) && EXTRUDERS > 1)
editable.uint8 = thermalManager.fan_speed[1];
EDIT_ITEM_FAST(percent, MSG_FAN_SPEED_2, &editable.uint8, 0, 255, [](){ thermalManager.set_fan_speed(1, editable.uint8); });
#if ENABLED(EXTRA_FAN_SPEED)
EDIT_ITEM_FAST(percent, MSG_EXTRA_FAN_SPEED_2, &thermalManager.new_fan_speed[1], 3, 255);
#endif
#endif
- #if HAS_FAN2 || (ENABLED(SINGLENOZZLE) && EXTRUDERS > 2)
+ #if HAS_FAN2 || (DISABLED(SINGLENOZZLE) && EXTRUDERS > 2)
editable.uint8 = thermalManager.fan_speed[2];
EDIT_ITEM_FAST(percent, MSG_FAN_SPEED_3, &editable.uint8, 0, 255, [](){ thermalManager.set_fan_speed(2, editable.uint8); });
#if ENABLED(EXTRA_FAN_SPEED)
|
[No CFG could be retrieved]
|
Protected read from System. in Extended hot - end.
|
This condition doesn't get rid of the issue that `HAS_FAN1` is made irrelevant. I will look into the back-history of this code to see why it is shaped the way it is.
|
@@ -208,8 +208,8 @@ func (cfg *AutoScalingConfig) RegisterFlags(argPrefix string, f *flag.FlagSet) {
f.StringVar(&cfg.RoleARN, argPrefix+".role-arn", "", "AWS AutoScaling role ARN")
f.Int64Var(&cfg.MinCapacity, argPrefix+".min-capacity", 3000, "DynamoDB minimum provision capacity.")
f.Int64Var(&cfg.MaxCapacity, argPrefix+".max-capacity", 6000, "DynamoDB maximum provision capacity.")
- f.Int64Var(&cfg.OutCooldown, argPrefix+".out-cooldown", 3000, "DynamoDB minimum time between each autoscaling event that increases provision capacity.")
- f.Int64Var(&cfg.InCooldown, argPrefix+".in-cooldown", 3000, "DynamoDB minimum time between each autoscaling event that decreases provision capacity.")
+ f.Int64Var(&cfg.OutCooldown, argPrefix+".out-cooldown", 1800, "DynamoDB minimum seconds between each autoscale up.")
+ f.Int64Var(&cfg.InCooldown, argPrefix+".in-cooldown", 1800, "DynamoDB minimum seconds between each autoscale down.")
f.Float64Var(&cfg.TargetValue, argPrefix+".target-value", 80, "DynamoDB target ratio of consumed capacity to provisioned capacity.")
}
|
[GetReadQueries->[GetReadQueries,forSchemasIndexQuery],hourlyBuckets->[tableForBucket],GetReadQueriesForMetricLabel->[GetReadQueriesForMetricLabel,forSchemasIndexQuery],dailyBuckets->[tableForBucket],GetWriteEntries->[GetWriteEntries,forSchemasIndexEntry],GetReadQueriesForMetric->[GetReadQueriesForMetric,forSchemasIndexQuery],GetReadQueriesForMetricLabelValue->[GetReadQueriesForMetricLabelValue,forSchemasIndexQuery],RegisterFlags->[RegisterFlags]]
|
RegisterFlags registers flags for the Auto Scaling config. TableDesc is a TableDesc for a sequence of tables that are not in the table list.
|
All usages of this are as a time.Duration, so perhaps we should make this a DurationVar?
|
@@ -338,8 +338,11 @@ def compute_covariance(epochs, keep_sample_mean=True, tmin=None, tmax=None,
cov : instance of Covariance
The computed covariance.
"""
+
if not isinstance(epochs, list):
- epochs = [epochs]
+ epochs = _unpack_epochs(epochs)
+ else:
+ epochs = reduce(add, [_unpack_epochs(epoch) for epoch in epochs])
# check for baseline correction
for epochs_t in epochs:
|
[write_cov->[save],Covariance->[__iadd__->[_check_covs_algebra],__add__->[_check_covs_algebra]],_get_whitener->[rank],whiten_evoked->[prepare_noise_cov],compute_covariance->[Covariance,_check_n_samples],read_cov->[Covariance],compute_whitener->[prepare_noise_cov],prepare_noise_cov->[_get_whitener],compute_raw_data_covariance->[Covariance,_check_n_samples]]
|
Estimate the covariance matrix from a list of Epochs or a list of Epochs Check if all the epochs have compatible Compute the covariance matrix for a .
|
Just to check: would sum([_unpack_epochs(epoch) for epoch in epochs]) work too?
|
@@ -80,8 +80,11 @@ public class AvroDataTranslator implements DataTranslator {
private Iterable<String> names;
- TypeNameGenerator() {
- this(ImmutableList.of(KsqlConstants.AVRO_SCHEMA_FULL_NAME));
+ TypeNameGenerator(final Map<String, String> properties) {
+ final String schemaFullName = properties.get(KsqlAvroTopicSerDe.AVRO_SCHEMA_FULL_NAME);
+ this.names = ImmutableList.of(schemaFullName == null
+ ? KsqlConstants.AVRO_SCHEMA_FULL_NAME :
+ StringUtil.cleanQuotes(schemaFullName));
}
private TypeNameGenerator(final Iterable<String> names) {
|
[AvroDataTranslator->[replaceSchema->[replaceSchema,name],toKsqlRow->[toKsqlRow],TypeNameGenerator->[with->[TypeNameGenerator]],toConnectRow->[toConnectRow],buildAvroCompatibleSchema->[avroCompatibleFieldName,with,name,buildAvroCompatibleSchema]]]
|
Creates a new avro - compatible row. Construct a schema with the fields.
|
Can we rename `AVRO_SCHEMA_FULL_NAME` to `DEFAULT_AVRO_SCHEMA_FULL_NAME` please?
|
@@ -53,8 +53,9 @@ def export_products(
def get_filename(model_name: str, file_type: str) -> str:
- return "{}_data_{}.{}".format(
- model_name, timezone.now().strftime("%d_%m_%Y"), file_type
+ hash = secrets.token_hex(nbytes=3)
+ return "{}_data_{}_{}.{}".format(
+ model_name, timezone.now().strftime("%d_%m_%Y_%H_%M_%S"), hash, file_type
)
|
[export_products_in_batches->[queryset_in_batches]]
|
Get filename for a given model name and file type.
|
Are we sure that this will not return any forbidden characters for the path file?
|
@@ -47,15 +47,15 @@ func (c *console) Close() error {
func (c *console) PublishEvent(
s op.Signaler,
opts outputs.Options,
- event common.MapStr,
+ event outputs.Data,
) error {
var jsonEvent []byte
var err error
if c.config.Pretty {
- jsonEvent, err = json.MarshalIndent(event, "", " ")
+ jsonEvent, err = json.MarshalIndent(event.Event, "", " ")
} else {
- jsonEvent, err = json.Marshal(event)
+ jsonEvent, err = json.Marshal(event.Event)
}
if err != nil {
logp.Err("Fail to convert the event to JSON (%v): %#v", err, event)
|
[PublishEvent->[SigCompleted,writeBuffer,Critical,Marshal,MarshalIndent,SigFailed,Err],writeBuffer->[Write],Stat,Unpack,RegisterOutputPlugin,Errorf]
|
PublishEvent publishes an event to the console.
|
Can we rename the field itself to data as well? Otherwise we have inception below :-)
|
@@ -32,7 +32,7 @@ import games.strategy.triplea.util.Stopwatch;
/**
* A place to find images and map data for a ui.
*/
-public class UIContext extends AbstractUIContext implements IUIContext {
+public class UIContext extends AbstractUIContext {
protected MapData m_mapData;
protected final TileImageFactory m_tileImageFactory = new TileImageFactory();
protected final UnitImageFactory m_unitImageFactory = new UnitImageFactory();
|
[UIContext->[internalSetMapDir->[setScale],setScale->[setScale],shutDown->[shutDown]]]
|
Creates a UIContext object that can be used to display the UI for a game. Override to set the Cursor and Scale for the UIContext.
|
There are methods in IUIContext, no?
|
@@ -201,5 +201,12 @@ func ValidatePolicyConfig(config api.PolicyConfig) fielderrors.ValidationErrorLi
func ValidateProjectRequestConfig(config api.ProjectRequestConfig) fielderrors.ValidationErrorList {
allErrs := fielderrors.ValidationErrorList{}
+ if len(config.ProjectRequestTemplate) > 0 {
+ tokens := strings.Split(config.ProjectRequestTemplate, "/")
+ if len(tokens) != 2 {
+ allErrs = append(allErrs, fielderrors.NewFieldInvalid("projectRequestTemplate", config.ProjectRequestTemplate, "must be in the form: namespace/templateName"))
+ }
+ }
+
return allErrs
}
|
[Sprintf,TrimSpace,Parse,ParseCIDR,Prefix,NewFieldInvalid,HasSuffix,NewFieldRequired]
|
ValidateProjectRequestConfig validates a ProjectRequestConfig.
|
If ProjectRequestTemplate is required, shouldn't you throw an error if it's absent?
|
@@ -101,6 +101,18 @@ def write_mrk(fname, points):
raise ValueError(err)
+def average_mrks(mrk, mrk2):
+ if isinstance(mrk, basestring):
+ mrk = read_mrk(mrk)
+ if isinstance(mrk2, basestring):
+ mrk2 = read_mrk(mrk2)
+
+ np.testing.assert_array_equal(mrk.shape, mrk2.shape,
+ err_msg= 'Mismatch in the number of markers.')
+ mrk = (mrk+mrk2)/2
+ return mrk
+
+
def read_elp(fname):
"""ELP point extraction in Polhemus head space
|
[read_hsp->[findall,read,compile,open,array],read_elp->[findall,len,ValueError,compile,open,array],read_mrk->[splitext,loadtxt,append,asarray,unpack,seek,read,fromfile,ValueError,open,range,load,array],write_mrk->[dump,splitext,asarray,str,ValueError,savetxt,open],write_hsp->[write,asarray,len,str,ValueError,now,format,savetxt,open],read_sns->[findall,read,compile,open,array]]
|
Reads an ELP file and returns the Fiducial and marker points in Polh.
|
We never use np.testing outside of tests. Also, shapes are tuples so == works fine.
|
@@ -595,8 +595,6 @@ namespace System.ServiceModel.Syndication
}
}
reader.MoveToElement();
- string localName = reader.LocalName;
- string nameSpace = reader.NamespaceURI;
string val = (kind == TextSyndicationContentKind.XHtml) ? reader.ReadInnerXml() : reader.ReadElementString();
TextSyndicationContent result = new TextSyndicationContent(val, kind);
if (attrs != null)
|
[Atom10FeedFormatter->[SyndicationFeed->[TryParseFeedElementFrom,ReadItems],WriteItemContents->[WriteItemContents,WriteContentTo,WriteItemLastUpdatedTimeTo,WriteFeedTo,WriteElement,WriteCategoriesTo,WriteItemAuthorsTo,WriteLink,AsString,WriteItemContributorsTo],SyndicationLink->[ReadLink],ReadItemFrom->[TryParseItemElementFrom,ReadItemFrom],WriteContentTo->[WriteTo],ReadCategory->[ReadCategory],WriteCategoriesTo->[WriteCategory],WriteFeedLastUpdatedTimeTo->[WriteElement],WritePersonTo->[WriteElement],SyndicationItem->[ReadItemFrom],WriteItems->[WriteItem],WriteItem->[WriteItemContents],ReadFrom->[CanRead],SyndicationCategory->[ReadCategory],WriteFeedTo->[WriteContentTo,WriteElement,WriteCategoriesTo,WriteFeedLastUpdatedTimeTo,WriteFeedContributorsTo,WriteLink,WriteItems,WriteFeedAuthorsTo]]]
|
Reads text content from helper.
|
Do either LocalName or NamespaceURI have side-effects, e.g. advancing the reader, throwing exceptions if something couldn't be parsed, etc.?
|
@@ -49,12 +49,16 @@ class SetPassword(CreateToken):
token = graphene.String(
description="A one-time token required to set the password.", required=True
)
+ email = graphene.String(required=True)
+ password = graphene.String(required=True)
class Meta:
description = (
"Sets the user's password from the token sent by email "
"using the RequestPasswordReset mutation."
)
+ error_type_class = AccountError
+ error_type_field = "account_errors"
@classmethod
def mutate(cls, root, info, **data):
|
[BaseAddressUpdate->[perform_mutation->[clean_input],clean_input->[can_edit_address]],ConfirmAccount->[perform_mutation->[ConfirmAccount]],PasswordChange->[perform_mutation->[PasswordChange]],BaseAddressDelete->[clean_instance->[can_edit_address],perform_mutation->[clean_instance,check_permissions]],RequestPasswordReset->[perform_mutation->[RequestPasswordReset]],BaseCustomerCreate->[save->[save],Arguments->[UserCreateInput]]]
|
Mutate a node.
|
Where are the descriptions?
|
@@ -1448,11 +1448,7 @@ class URL implements Serializable {
return false;
}
URL other = (URL) obj;
- if (host == null) {
- if (other.host != null) {
- return false;
- }
- } else if (!host.equals(other.host)) {
+ if(!StringUtils.isEquals(host, other.host)) {
return false;
}
if (parameters == null) {
|
[URL->[getUrlParameter->[getParameterAndDecoded,valueOf],addParameterIfAbsent->[URL,getParameters,hasParameter],getMethodPositiveParameter->[getMethodParameter],equals->[equals],addParametersIfAbsent->[URL,getParameters],toJavaURL->[toString,URL],getPositiveIntParameter->[getPositiveParameter],getMethodParameterAndDecoded->[decode],hashCode->[hashCode],getMethodBooleanParameter->[getMethodParameter],getServiceKey->[getParameter],getBackupUrls->[setAddress],setUsername->[URL],addParameters->[addParameters,URL,getParameters],hasMethodParameter->[getMethodParameter],setProtocol->[URL],clearParameters->[URL],decode->[decode],encode->[encode],getMethodPositiveIntParameter->[getMethodPositiveParameter],isAnyHost->[getParameter],setAddress->[URL],toMap->[valueOf],getBooleanParameter->[getParameter],isLocalHost->[isLocalHost,getParameter],toServiceStringWithoutResolving->[buildString],getMethodParameter->[getMethodParameter,getParameter],buildKey->[toString,append],getBackupAddress->[getBackupAddress,getAddress],addParameter->[URL,getParameters,addParameter,valueOf],getColonSeparatedKey->[toString],getPositiveParameter->[getParameter],setServiceInterface->[addParameter],getPathKey->[getParameter],setHost->[URL],getParameterAndDecoded->[getParameterAndDecoded,decode],getRawParameter->[getParameter,valueOf],setPath->[URL],getServiceInterface->[getParameter],getMethodIntParameter->[getMethodParameter],getIntParameter->[getParameter],getParameter->[getParameter],hasParameter->[getParameter],setPort->[URL],buildParameters->[getParameters],buildString->[buildString,buildParameters,getHost,getIp,toString,getPath],append->[append,getParameter],removeParameters->[URL,getParameters,removeParameters],toServiceString->[buildString],addParameterString->[addParameters],toParameterString->[toString,toParameterString],setPassword->[URL],addParameterAndEncoded->[encode],valueOf->[URL,valueOf]]]
|
This method checks if the given object is immutable and has no duplicate values.
|
what about this block, should we refactor at the same time?
|
@@ -15,12 +15,12 @@ import (
// REST implements the RESTStorage interface in terms of an Registry.
type REST struct {
- registry Registry
+ client kclient.NamespaceInterface
}
-// NewStorage returns a new REST.
-func NewREST(registry Registry) apiserver.RESTStorage {
- return &REST{registry}
+// NewREST returns a RESTStorage object that will work against Project resources
+func NewREST(client kclient.NamespaceInterface) apiserver.RESTStorage {
+ return &REST{client: client}
}
// New returns a new Project for use with Create and Update.
|
[Get->[GetProject],List->[ListProjects],Create->[NewInvalid,ValidateProject,CreateProject,Errorf,Get,FillObjectMetaSystemFields],Delete->[DeleteProject]]
|
New returns a new object that represents a spec.
|
Another reason this has to be this way is that we have to run on kube directly - we should make a note somewhere that it's illegal to reference a kube registry from an openshift storage object.
|
@@ -190,10 +190,10 @@ export class AmpStoryAutoAds extends AMP.BaseElement {
constructor(element) {
super(element);
- /** @private {?../../amp-story/0.1/amp-story.AmpStory} */
+ /** @private {?../../amp-story/1.0/amp-story.AmpStory} */
this.ampStory_ = null;
- /** @private {?../../amp-story/0.1/navigation-state.NavigationState} */
+ /** @private {?../../amp-story/1.0/navigation-state.NavigationState} */
this.navigationState_ = null;
/** @private {number} */
|
[No CFG could be retrieved]
|
A class that exports AMP - Story auto - ad events. Private methods for the object that holds the state of the object.
|
Remnant of moving from `0.1` to `1.0`? Is this change required as part of moving to a new Closure Compiler or just convenient to make in this PR?
|
@@ -41,7 +41,7 @@ export function deepScan(
startNode,
callback,
arg = undefined,
- state = true,
+ state = /** @type {S} */ (/** @type {?} */ (true)),
includeSelf = true
) {
if (includeSelf) {
|
[No CFG could be retrieved]
|
Recursive function to find the top level in a subtree.
|
fmi: did directly casting to `S` not work?
|
@@ -420,9 +420,11 @@ namespace DotNetNuke.Services.Search.Internals
{
return searchPhrase;
}
-
+
+ string output = FoldToASCII(searchPhrase);
+
// we have a quotation marks and/or wildcard search, adjust accordingly
- var chars = searchPhrase.ToArray();
+ var chars = output.TrimEnd('\0').ToCharArray();
var insideQuote = false;
var newPhraseBulder = new StringBuilder();
var currentWord = new StringBuilder();
|
[SearchHelperImpl->[UpdateSearchStopWords->[UpdateSearchStopWords],UpdateSynonymsGroup->[GetSynonymsGroups,UpdateSynonymsGroup],EnsurePortalDefaultsAreSet->[AddSynonymsGroup,AddSearchStopWords],GetPortalsToReindex->[IsReindexRequested],GetSynonyms->[GetSynonymTerms],DeleteSynonymsGroup->[DeleteSynonymsGroup],AddSearchStopWords->[AddSearchStopWords],SynonymTermsCallBack->[GetSynonymsGroups],AddSynonymsGroup->[GetSynonymsGroups,AddSynonymsGroup],GetSynonymsGroupsCallBack->[EnsurePortalDefaultsAreSet],DeleteSearchStopWords->[DeleteSearchStopWords]]]
|
RephraseSearchText - Search for a phrase.
|
Please put the `Trim` inside `FoldToASCII`
|
@@ -145,7 +145,7 @@ namespace Microsoft.Extensions.FileProviders
ref _fileWatcher,
ref _fileWatcherInitialized,
ref _fileWatcherLock,
- _fileWatcherFactory);
+ _fileWatcherFactory) ?? throw new NullReferenceException(nameof(_fileWatcher));
}
set
{
|
[PhysicalFileProvider->[IDirectoryContents->[GetFullPath],Dispose->[Dispose],GetFullPath->[GetFullPath],IFileInfo->[GetFullPath]]]
|
gets or sets a value that determines if a file change event is polling. - This property is only effective when DOTNET_USE_POLLING_FILE_.
|
this should be a separate change - we should avoid any product changes when adding annotations
|
@@ -150,7 +150,7 @@ module Users
# Don't allow the user to modify UserTeam-s if he's not admin,
# unless he/she is modifying his/her UserTeam
if current_user != @user_t.user &&
- !is_admin_of_team(@user_t.team)
+ !can_read_team?(@user_t.team)
render_403
end
end
|
[UserTeamsController->[destroy->[destroy],update->[update]]]
|
load_user_team Loads the n - node UserTeam - s if the.
|
Here the same, I would make separate methods `check_leave_team_permission` and `check_manage_user_team_permission`.
|
@@ -109,7 +109,6 @@ static void write_pattern(struct mm_heap *heap_map, int heap_depth,
memset(
(void *)current_map->base, pattern,
current_map->count * current_map->block_size);
- sizeof(*current_map));
}
}
|
[No CFG could be retrieved]
|
get the total size of a given set of block_map allocate from system memory pool.
|
wow, we must have other not compiled code hidden by conditional compilation.
|
@@ -915,6 +915,10 @@ class ExceptionHandler(object):
'traceback': _format_exc(exc_val, exc_tb),
'conda_info': info_dict,
}
+
+ if isinstance(exc_val, CondaError):
+ error_report['conda_error_dump'] = exc_val.dump_map()
+
return error_report
def print_unexpected_error_report(self, error_report):
|
[conda_exception_handler->[ExceptionHandler],ExceptionHandler->[handle_exception->[EncodingError,_format_exc,CondaMemoryError,NoSpaceLeftError],get_error_report->[_format_exc],_print_conda_exception->[print_conda_exception]]]
|
Get error report from conda context. This method is called when an unexpected error has occurred. It will generate a message in the.
|
I'm not sure I like the name of this key here...
|
@@ -253,3 +253,9 @@ class DvcFileSystem(BaseFileSystem): # pylint:disable=abstract-method
ret[obj.hash_info.name] = obj.hash_info.value
return ret
+
+ def _download(self, from_info, to_file, **kwargs):
+ fs, path = self._get_fs_path(from_info)
+ fs._download( # pylint: disable=protected-access
+ path, to_file, **kwargs
+ )
|
[DvcFileSystem->[metadata->[check_isdir,_find_outs],_walk->[_walk,_add_dir],_add_dir->[_fetch_dir],info->[metadata],check_isdir->[_get_granular_hash],walk_files->[walk],walk->[_walk,_add_dir],open->[_get_granular_hash,open,_find_outs]]]
|
Return a dict with the info of a node in the tree.
|
For the record: this is a bit ugly, but it will turn into a cleaner and more proper `fs.get_file` when migrating dvcfs to fsspec.
|
@@ -325,6 +325,10 @@ class RemoteManager(object):
except ConnectionError as exc:
raise ConanConnectionError("%s\n\nUnable to connect to %s=%s"
% (str(exc), remote.name, remote.url))
+ except RecipeNotFoundException as exc:
+ raise NotFoundException("%s. [Remote: %s]" % (exception_message_safe(exc), remote.name))
+ except PackageNotFoundException as exc:
+ raise NotFoundException("%s. [Remote: %s]" % (exception_message_safe(exc), remote.name))
except ConanException as exc:
raise exc.__class__("%s. [Remote: %s]" % (exception_message_safe(exc), remote.name))
except Exception as exc:
|
[unzip_and_get_files->[check_compressed_files,remove],RemoteManager->[upload_package->[_package_integrity_check]],_compress_recipe_files->[add_tgz]]
|
Call a remote method.
|
What? So if calling a remote returns a RecipeNotFound you re-convert it to NotFoundException? Seems very weird.
|
@@ -147,7 +147,7 @@ class Plugin_Storage {
}
// If a plugin was activated or deactivated.
- $number_of_plugins_differ = count( self::$plugins ) !== count( get_option( self::ACTIVE_PLUGINS_OPTION_NAME, array() ) );
+ $number_of_plugins_differ = count( self::$plugins ) !== count( (array) get_option( self::ACTIVE_PLUGINS_OPTION_NAME, array() ) );
if ( $number_of_plugins_differ || true === self::$refresh_connected_plugins ) {
self::update_active_plugins_option();
|
[No CFG could be retrieved]
|
This method is called to configure the plugin manager.
|
Without being able to reproduce this, this is likely enough. Ideally, if we can run this code on an impacted site and confirm it isn't the `$plugins` that is problematic. (I don't see how, but I don't trust myself).
|
@@ -72,6 +72,14 @@ def self_chat(opt, print_parser=None):
if hasattr(agent2, 'id'):
agent2.id = agent2.id + "2"
+ # Check for `selfchat` in the task name
+ if 'selfchat' not in opt['task']:
+ warn_once(
+ 'You are using self chat with task {}.'.format(opt['task']) +
+ 'If your task has an existing self chat world, then run with '
+ '-t {}:selfchat'.format(opt['task'])
+ )
+
world = create_task(opt, [agent1, agent2])
if print_parser:
|
[setup_args->[ParlaiParser,add_cmdline_args,set_defaults,add_argument],self_chat->[time,hasattr,print_args,parley,write,print,create_agent,parse_args,clone,get,WorldLogger,log,create_task,isinstance,seed,float,TimeLogger,display],setup_args,parse_args,self_chat]
|
Run the self - chats.
|
You missed a space. Also you can use `f'strings'` if you want!
|
@@ -142,9 +142,9 @@ def _get_dvc_repo_info(repo):
def add_parser(subparsers, parent_parser):
- VERSION_HELP = "Display the DVC version"
- " and system/environment information."
-
+ VERSION_HELP = (
+ "Display the DVC version and system/environment information."
+ )
version_parser = subparsers.add_parser(
"version",
parents=[parent_parser],
|
[_get_dvc_repo_info->[get],add_parser->[add_parser,set_defaults,append_doc_link],CmdVersion->[get_linktype_support_info->[link,append,is_link,uuid4,str,unlink,format,items,join,remove,open],get_fs_type->[disk_partitions,Path,chain],get_supported_remotes->[append,get_missing_deps,join],run->[,relpath,abspath,append,is_binary,python_version,get_fs_type,info,get_supported_remotes,Repo,get_linktype_support_info,format,join,exists,platform,warning,getcwd,_get_dvc_repo_info]],getLogger]
|
Adds a parser to subparsers to display the DVC version and system and environment information.
|
I'll update docs to match this.
|
@@ -45,4 +45,14 @@ public abstract class BaseCommand implements Runnable {
)
protected boolean dryRun = false;
+ protected static final Logger LOGGER = LoggerFactory.getLogger(Migrations.class);
+
+ @Override
+ public void run() {
+ final long startTime = System.nanoTime();
+ command();
+ LOGGER.info("Execution time: " + (System.nanoTime() - startTime) / 1000000000);
+ }
+
+ protected abstract void command();
}
|
[No CFG could be retrieved]
|
Check if the sequence number is not in the list of sequence numbers.
|
I think it's better to have the different classes each have their own loggers, rather than having them all share the `Migrations.class` logger. By having different loggers, users can more easily filter logs and can also configure different log levels for the different loggers. I assume your motivation for switching to a shared logger was so that the execution time would be logged by the same logger as the other messages from the command execution itself? I don't have a great solution but one option could be to have each command implement a simple `getLogger()` method, which we can then call here.
|
@@ -377,14 +377,16 @@ function poller_kill_stale_workers() {
// We killed the stale process.
// To avoid a blocking situation we reschedule the process at the beginning of the queue.
// Additionally we are lowering the priority.
- q("UPDATE `workerqueue` SET `executed` = '0000-00-00 00:00:00', `created` = '%s',
+ q("UPDATE `workerqueue` SET `executed` = '%s', `created` = '%s',
`priority` = %d, `pid` = 0 WHERE `pid` = %d",
+ dbesc(NULL_DATE),
dbesc(datetime_convert()),
intval(PRIORITY_NEGLIGIBLE),
intval($pid["pid"]));
} else
logger("Worker process ".$pid["pid"]." (".implode(" ", $argv).") now runs for ".round($duration)." of ".$max_duration." allowed minutes. That's okay.", LOGGER_DEBUG);
}
+ }
}
/**
|
[poller_execute->[max_processes_reached],poller_too_much_workers->[proc_run],poller_run->[maxload_reached,max_processes_reached,start_process],call_worker_if_idle->[proc_run,remove_inactive_processes],end_process]
|
Kill stale processes that are not running Update the workerqueue with executed = 0.
|
Standards: Please add braces to this `else`.
|
@@ -26,7 +26,7 @@ class ProjectActivitiesController < ApplicationController
end
def check_view_permissions
- unless can_view_project_activities(@project)
+ unless can_read_project?(@project)
render_403
end
end
|
[ProjectActivitiesController->[check_view_permissions->[can_view_project_activities],index->[render_to_string,render,respond_to,json,last_activities],load_vars->[find_by_id],before_action]]
|
Checks if the user has permission to view the nagios.
|
Favor modifier unless usage when having a single-line body. Another good alternative is the usage of control flow &&/||.
|
@@ -54,7 +54,7 @@ public class U2FRedisDeviceRepository extends BaseU2FDeviceRepository {
public Collection<? extends U2FDeviceRegistration> getRegisteredDevices() {
val expirationDate = LocalDate.now(ZoneId.systemDefault())
.minus(this.expirationTime, DateTimeUtils.toChronoUnit(this.expirationTimeUnit));
- val keys = (Set<String>) this.redisTemplate.keys(getPatternRedisKey());
+ val keys = (Set<String>) RedisUtils.keys(this.redisTemplate, getPatternRedisKey());
if (keys != null) {
return queryDeviceRegistrations(expirationDate, keys);
}
|
[U2FRedisDeviceRepository->[getRedisKeys->[getPatternRedisKey]]]
|
Gets registered devices.
|
Remove the cast
|
@@ -117,6 +117,12 @@ describe ProjectsController, type: :controller do
expect(response).to have_http_status(:unprocessable_entity)
expect(response.content_type).to eq 'application/json'
end
+
+ it 'never calls create activity service' do
+ expect(Activities::CreateActivityService).to receive(:call)
+
+ post :create, params: params, format: :json
+ end
end
end
|
[create,new,let,describe,eval,first,it,put,chr,to,before,advance,let!,require,have_http_status,each,id,context,get,eq]
|
It returns a list of objects with optional parameters that can be queried by the user. missing context actions.
|
Is this named correctly?
|
@@ -27,11 +27,11 @@ from .pipeline import AsyncPipeline, PipelineStep
from .queue import Signal
-def run_pipeline(video, encoder, decoder, render_fn, fps=30):
+def run_pipeline(video, encoder, decoder, render_fn, decoder_seq_size=16, fps=30):
pipeline = AsyncPipeline()
pipeline.add_step("Data", DataStep(video), parallel=False)
pipeline.add_step("Encoder", EncoderStep(encoder), parallel=False)
- pipeline.add_step("Decoder", DecoderStep(decoder), parallel=False)
+ pipeline.add_step("Decoder", DecoderStep(decoder, sequence_size=decoder_seq_size), parallel=False)
pipeline.add_step("Render", RenderStep(render_fn, fps=fps), parallel=True)
pipeline.run()
|
[softmax->[sum,exp],DataStep->[_open_video->[isOpened,int,next,VideoCapture],process->[_open_video,read,isOpened],end->[release],setup->[_open_video],__init__->[cycle,super,iter]],DecoderStep->[process->[infer,softmax,expand_dims,max,append,len,concatenate],__init__->[super,AsyncWrapper,deque]],RenderStep->[process->[update,time,render,_sync_time],__init__->[super,MovingAverageMeter],_sync_time->[sleep,time],end->[destroyAllWindows]],EncoderStep->[process->[infer,preprocess_frame,reshape],__init__->[super,AsyncWrapper]],run_pipeline->[DataStep,DecoderStep,add_step,RenderStep,EncoderStep,print_statistics,run,close,AsyncPipeline]]
|
Runs a pipeline of the given video.
|
I can understand the sequence size being configurable when you're using the dummy decoder, but does it make sense to override it when using a real decoder? IMO, the code should only allow the `--seq` option to be set when `-m_de` is unspecified, and otherwise derive the sequence size from the decoder's input shape.
|
@@ -53,10 +53,11 @@
</ul>
</dd>
</dl>
- <router-link tag="button" type="submit"
- :to="{name: '<%= jhiPrefixCapitalized %>User'}"
- class="btn btn-info">
+ <router-link custom v-slot="{ navigate }"
+ :to="{name: '<%= jhiPrefixCapitalized %>User'}">
+ <button @click="navigate" type="submit" class="btn btn-info">
<font-awesome-icon icon="arrow-left"></font-awesome-icon> <span v-text="$t('entity.action.back')"> Back</span>
+ </button>
</router-link>
</div>
</div>
|
[No CFG could be retrieved]
|
Components for the n - node .
|
Should it be really a `submit` button and not just a `button` (was already before the case)?
|
@@ -33,7 +33,7 @@ class LinkTag < LiquidTagBase
path.slice!(0) if path.starts_with?("/") # remove leading slash if present
path.slice!(-1) if path.ends_with?("/") # remove trailing slash if present
extracted_hash = Addressable::Template.new("{username}/{slug}").extract(path)&.symbolize_keys
- raise StandardError, "The article you're looking for does not exist: {% link #{slug} %}" unless extracted_hash
+ raise StandardError, "The article you're looking for does not exist: #{slug}" unless extracted_hash
extracted_hash
end
|
[LinkTag->[render->[render],find_article_by_org->[first,find_by],get_article->[article_hash,strip,find_article_by_org,find_article_by_user],article_hash->[app_domain,path,starts_with?,ends_with?,port,parse,casecmp?,host,raise,symbolize_keys,slice!,blank?],initialize->[get_article,title],find_article_by_user->[first,find_by],freeze,include],register_tag]
|
Returns the hash of the article with the given slug.
|
I changed this error message because I feel the main piece of information is the link that was passed in, not the syntax for the liquid tag.
|
@@ -550,7 +550,8 @@ func GenerateRegionGuideFunc(enableLog bool) RegionGuideFunc {
// Once flow has changed, will update the cache.
// Because keys and bytes are strongly related, only bytes are judged.
if region.GetRoundBytesWritten() != origin.GetRoundBytesWritten() ||
- region.GetRoundBytesRead() != origin.GetRoundBytesRead() {
+ region.GetRoundBytesRead() != origin.GetRoundBytesRead() ||
+ region.flowRoundDivisor < origin.flowRoundDivisor {
saveCache, needSync = true, true
}
|
[shouldRemoveFromSubTree->[GetLearners,GetPendingPeers,GetVoters],SetRegion->[GetRegion,GetPendingPeers,Get,GetLearners,GetEndKey,AddNew,GetID,GetStartKey,GetVoters],removeRegionFromSubTree->[GetPeers],GetPeers->[GetPeers],GetAdjacentRegions->[GetID,GetStartKey,GetEndKey,GetRegion],GetWriteRate->[GetInterval],GetMetaRegions->[Clone,Len],GetStoreRegionSize->[GetStoreLearnerRegionSize,GetStoreFollowerRegionSize,GetStoreLeaderRegionSize],updateSubTreeStat->[GetLearners,GetPendingPeers,GetVoters],GetRegionCount->[Len],Len->[Len],GetDownPeer->[GetPeer],GetDiffFollowers->[GetFollowers],GetWriteLoads->[GetBytesWritten,GetWriteQueryNum,GetKeysWritten],GetLoads->[GetBytesWritten,GetBytesRead,GetKeysWritten,GetReadQueryNum,GetKeysRead,GetWriteQueryNum],GetDownVoter->[GetPeer],AddNew->[GetID],SearchRegion->[GetID,GetRegion],GetRegion->[Get],GetRegions->[Len],String->[String,Clone],Less->[GetPeer],SearchPrevRegion->[GetID,GetRegion],RemoveRegion->[Delete,GetID],Clone->[Clone],GetDownLearner->[GetPeer],GetFollowers->[GetVoters],GetFollower->[GetVoters],ScanRange->[GetID,GetStartKey,GetRegion],GetPeers,GetPendingPeers,GetID,GetEndKey,GetRoundBytesWritten,GetRoundBytesRead,GetMeta,GetApproximateSize,GetApproximateKeys,GetLeader,GetReplicationStatus,GetPeer,GetRegionEpoch,GetStartKey,GetDownPeers]
|
regionMap wraps a regionItem and supports randomly pick a region. regionTree - Tree constructor.
|
Why can we skip sync when `divisor` becomes greater?
|
@@ -56,6 +56,7 @@ public class FreeStyleProject extends Project<FreeStyleProject,FreeStyleBuild> i
return FreeStyleBuild.class;
}
+ @Override
public DescriptorImpl getDescriptor() {
return (DescriptorImpl)Jenkins.getInstance().getDescriptorOrDie(getClass());
}
|
[FreeStyleProject->[DescriptorImpl->[newInstance->[FreeStyleProject]]]]
|
Get the build class for this build.
|
Please don't include general cleanup in otherwise unrelated files in your PR.
|
@@ -47,8 +47,10 @@ final class PublishMercureUpdatesListener
private $createdEntities;
private $updatedEntities;
private $deletedEntities;
+ private $requestStack;
+ private $formats;
- public function __construct(ResourceClassResolverInterface $resourceClassResolver, IriConverterInterface $iriConverter, ResourceMetadataFactoryInterface $resourceMetadataFactory, SerializerInterface $serializer, MessageBusInterface $messageBus = null, callable $publisher = null, ExpressionLanguage $expressionLanguage = null)
+ public function __construct(ResourceClassResolverInterface $resourceClassResolver, IriConverterInterface $iriConverter, ResourceMetadataFactoryInterface $resourceMetadataFactory, SerializerInterface $serializer, MessageBusInterface $messageBus = null, callable $publisher = null, RequestStack $requestStack, array $formats, ExpressionLanguage $expressionLanguage = null)
{
if (null === $messageBus && null === $publisher) {
throw new InvalidArgumentException('A message bus or a publisher must be provided.');
|
[PublishMercureUpdatesListener->[publishUpdate->[serialize,dispatch,getIriFromItem],onFlush->[getScheduledEntityInsertions,getUnitOfWork,getScheduledEntityUpdates,storeEntityToPublish,getScheduledEntityDeletions],postFlush->[reset,publishUpdate],storeEntityToPublish->[getAttribute,isResourceClass,getObjectClass,evaluate,getIriFromItem],__construct->[reset]]]
|
Initializes the object with the given parameters.
|
We shouldn't use request stack here, because it makes the feature not fully compliant with GraphQL, and create a hard dependency to Symfony (we want to support Laravel and maybe PSR-7 at some point).
|
@@ -22,7 +22,7 @@ namespace CoreNodeModels.Input
[SupressImportIntoVM]
[IsDesignScriptCompatible]
[AlsoKnownAs("DSCoreNodesUI.Input.IntegerSlider")]
- public class IntegerSlider : SliderBase<int>
+ public class IntegerSlider : SliderBase<long>
{
/// <summary>
/// The NodeType property provides a name which maps to the
|
[IntegerSlider->[SerializeCore->[SerializeCore],DeserializeCore->[DeserializeCore],UpdateValueCore->[UpdateValueCore]]]
|
IntegerSlider provides a base class for all integer input nodes. Creates a base class for the IntegerSlider class.
|
@mjkkirschner is this an API break too?
|
@@ -506,7 +506,7 @@ module.exports = class huobipro extends Exchange {
return this.parseOHLCVs (response['data'], market, timeframe, since, limit);
}
- async loadAccounts (reload = false) {
+ async loadAccounts (reload = false, params = {}) {
if (reload) {
this.accounts = await this.fetchAccounts ();
} else {
|
[No CFG could be retrieved]
|
Get the most recent order order trades for a given symbol. Get currencies settings.
|
Forgot to pass `params` to `fetchAccounts` here )
|
@@ -29,14 +29,14 @@ public interface TicketState {
* @return the previous time used.
*/
- long getPreviousTimeUsed();
+ ZonedDateTime getPreviousTimeUsed();
/**
* Get the time the ticket was created.
*
* @return the creation time of the ticket.
*/
- long getCreationTime();
+ ZonedDateTime getCreationTime();
/**
* Authentication information from the ticket. This may be null.
|
[No CFG could be retrieved]
|
get the time used by the previous request.
|
Don't think this will work with JPA ticket registries. We'll likely run into either schema or serialization issues. Let's keep this as long.
|
@@ -59,7 +59,7 @@ export class IframeMessagingClient {
/**
* Send a postMessage to Host Window
* @param {object} message The message to send.
- * @private
+ * @protected
*/
messageHost_(message) {
this.getHostWindow().postMessage/*OK*/(message, '*');
|
[No CFG could be retrieved]
|
Construct a new object. Check if the message starts with amp - and if so parse the payload.
|
Why `@protected`? Why should this be overridden? Trailing underscore means `@private`, so would have to be removed.
|
@@ -91,14 +91,12 @@ public class LogoutResourceIT {
private WebTestClient webTestClient;
<%_ } _%>
- private OidcIdToken idToken;
-
@BeforeEach
public void before() <% if (!reactive) { %>throws Exception <% } %>{
Map<String, Object> claims = new HashMap<>();
claims.put("groups", Collections.singletonList("ROLE_USER"));
claims.put("sub", 123);
- this.idToken = new OidcIdToken(ID_TOKEN, Instant.now(), Instant.now().plusSeconds(60), claims);
+ OidcIdToken idToken = new OidcIdToken(ID_TOKEN, Instant.now(), Instant.now().plusSeconds(60), claims);
<%_ if (!reactive) { _%>
SecurityContextHolder.getContext().setAuthentication(authenticationToken(idToken));
|
[No CFG could be retrieved]
|
A LogoutResource is a REST controller that logs out the user. It is a login resource Logout of the current session.
|
idToken is used in getLogoutInformation method
|
@@ -230,11 +230,13 @@ namespace System.Data
#endregion
#region IBindingList
-// TODO: Enable after System.ComponentModel.TypeConverter is annotated
+ // TODO: Enable after System.ComponentModel.TypeConverter is annotated
#nullable disable
/// <summary>
/// Clears both expression-based and DataView's string-based sorting.
/// </summary>
+ [UnconditionalSuppressMessage("ReflectionAnalysis", "IL2026:UnrecognizedReflectionPattern",
+ Justification = "This whole class is unsafe. Constructors are marked as such.")]
void IBindingList.RemoveSort()
{
base.Sort = string.Empty;
|
[LinqDataView->[FindByKey->[FindByKey],SetIndex->[SetIndex],FindRowsByKey->[FindRowsByKey]]]
|
Remove the sort property from the binding list.
|
What is unsafe about the two methods below? Can these suppressions (and thus the RUC on the ctor) be removed on this class?
|
@@ -80,7 +80,7 @@ func commitCmd(c *cli.Context) error {
case "oci":
mimeType = buildah.OCIv1ImageManifest
if c.IsSet("message") {
- return errors.Errorf("messages cannot be added to the OCIv1 image format.")
+ return errors.Errorf("messages are only compatible with the docker image format (-f docker)")
}
case "docker":
mimeType = buildah.Dockerv2ImageManifest
|
[ID,Wrapf,Commit,Println,Args,GetSystemContext,StringSlice,Bool,Shutdown,GetRuntime,ToUpper,LookupContainer,String,Errorf,StringInSlice,GetConfig,Split,IsSet]
|
commitCmd commits a container with the specified message and changes to the instructions with the specified change finds the container and commits it.
|
@rhatdan question, does the first docker here need to be capped?
|
@@ -1850,9 +1850,10 @@ class BaseRaw(ProjMixin, ContainsMixin, UpdateChannelsMixin, SetChannelsMixin,
show=show, block=block, n_jobs=n_jobs,
axes=axes, verbose=verbose)
- @fill_doc
+ @verbose
def estimate_rank(self, tstart=0.0, tstop=30.0, tol=1e-4,
- return_singular=False, picks=None, scalings='norm'):
+ return_singular=False, picks=None, scalings='norm',
+ verbose=None):
"""Estimate rank of the raw data.
This function is meant to provide a reasonable estimate of the rank.
|
[BaseRaw->[notch_filter->[notch_filter],apply_function->[_check_fun],_preload_data->[_read_segment],crop->[_update_times,set_annotations],__setitem__->[_parse_get_set_params],resample->[_update_times,resample],append->[_read_segment,_update_times,append,set_annotations],estimate_rank->[time_as_index],apply_hilbert->[apply_function],__getitem__->[_read_segment,_parse_get_set_params],save->[time_as_index]],_write_raw->[close,_write_raw],_start_writing_raw->[append],_check_update_montage->[append]]
|
Plot a TIFF plot of the raw data. Estimate the rank of a in the current time series.
|
I would consider deprecating this method to avoid having 2 (inconsistant) ways of computing the rank from raw data.
|
@@ -94,6 +94,10 @@ class Server:
self.options = options
if os.path.isfile(STATUS_FILE):
os.unlink(STATUS_FILE)
+ if self.fine_grained:
+ options.incremental = True
+ options.show_traceback = True
+ options.cache_dir = os.devnull
def serve(self) -> None:
"""Serve requests, synchronously (no thread or fork)."""
|
[Server->[cmd_hang->[sleep],run_command->[getattr,method],cmd_status->[get_meminfo,update],cmd_check->[StringIO,create_source_list,getvalue,check],cmd_recheck->[check],check->[stats_summary,get_meminfo,build,append,get_stats,update,join,GcLogger],serve->[sendall,dump,write,getsockname,run_command,close,pop,getpid,unlink,receive,exit,isinstance,dumps,open,create_listening_socket,accept],create_listening_socket->[abspath,listen,bind,unlink,exists,socket],__init__->[unlink,isfile,process_options,exit]],get_meminfo->[getpid,memory_info,getrusage,Process],daemonize->[fork,umask,print,flush,waitpid,setsid,dup2,_exit,close,open,fileno,func]]
|
Initialize the server with the desired mypy flags.
|
Is this also going to be true for the "real" version of fine-grained?
|
@@ -323,3 +323,13 @@ func isVMSSNameInAgentPoolsArray(vmss string, cs *api.ContainerService) bool {
}
return false
}
+
+func azureStackImageBaseOverrides(kc *api.KubernetesConfig) {
+ // Azure Stack's custom hyperkube image is now hosted along with MS' images
+ // Override KubernetesImageBase/KubernetesImageBaseType if apimodel is set to the deprecated KubernetesImageBase
+ deprecatedAzureStackImageBase := "mcr.microsoft.com/k8s/azurestack/core/"
+ if strings.EqualFold(kc.KubernetesImageBase, deprecatedAzureStackImageBase) {
+ kc.KubernetesImageBase = "mcr.microsoft.com/"
+ kc.KubernetesImageBaseType = "mcr"
+ }
+}
|
[run->[validate,loadCluster],initialize->[validateTargetVersion]]
|
True if the node is a node and false if it is a node.
|
@CecileRobertMichon do you have any thoughts on doing this override here? Is this preferable to the pattern we have currently, where we pass along upgrade context through the code flow and do upgrade-specific mutations during defaults enforcement?
|
@@ -861,6 +861,9 @@ class SemanticAnalyzer(NodeVisitor):
return None
return Instance(cast(TypeInfo, sym.node), [])
+ def external_instance(self, qualified_name: str, args: List[Type] = None) -> Instance:
+ return Instance(self.named_type_or_none(qualified_name).type, args or None)
+
def is_instance_type(self, t: Type) -> bool:
return isinstance(t, Instance)
|
[SemanticAnalyzer->[analyze_comp_for->[analyze_lvalue],make_namedtuple_init->[named_type,make_argument],visit_for_stmt->[visit_block,visit_block_maybe,analyze_lvalue],add_var->[is_func_scope,qualified_name],add_symbol->[is_func_scope],visit_import_all->[normalize_type_alias,process_import_over_existing_name,correct_relative_import],bind_class_type_variables_in_symbol_table->[bind_type_var],visit_index_expr->[anal_type],accept->[accept],visit_func_expr->[analyze_function],normalize_type_alias->[add_module_symbol],analyze_simple_literal_type->[named_type_or_none],anal_type->[anal_type],is_class_scope->[is_func_scope],visit_with_stmt->[visit_block,analyze_lvalue],analyze_types->[anal_type],process_typevar_parameters->[expr_to_analyzed_type,object_type],visit_type_application->[anal_type],build_namedtuple_typeinfo->[named_type],visit_assignment_stmt->[anal_type],analyze_try_stmt->[analyze_lvalue],store_declared_types->[store_declared_types],parse_namedtuple_fields_with_types->[anal_type],analyze_function->[next_function_tvar_id],visit_block_maybe->[visit_block],find_type_variables_in_type->[find_type_variables_in_type],visit_member_expr->[normalize_type_alias],visit_while_stmt->[visit_block_maybe],analyze_tuple_or_list_lvalue->[analyze_lvalue],visit_cast_expr->[anal_type],lookup_qualified->[lookup,normalize_type_alias],visit__promote_expr->[anal_type],is_valid_del_target->[is_valid_del_target],visit_if_stmt->[visit_block_maybe,visit_block],analyze_lvalue->[analyze_lvalue]],ThirdPass->[visit_func_def->[analyze],accept->[accept],visit_assignment_stmt->[analyze],visit_class_def->[analyze],visit_file->[accept],visit_cast_expr->[analyze],analyze->[accept],visit_decorator->[builtin_type],visit_type_application->[analyze]],FirstPass->[visit_func_def->[check_no_global,qualified_name,is_conditional_func],process_nested_classes->[process_nested_classes],visit_while_stmt->[accept],visit_import->[add_symbol],visit_assignment_stmt->[analyze_lvalue],visit_class_def->[check_no_global,
qualified_name],visit_try_stmt->[analyze_try_stmt],analyze->[named_type,qualified_name,accept],visit_overloaded_func_def->[check_no_global,qualified_name],visit_for_stmt->[accept,analyze_lvalue],visit_if_stmt->[accept],visit_import_from->[add_symbol],visit_block->[accept],visit_with_stmt->[accept,analyze_lvalue],visit_decorator->[qualified_name,add_symbol],analyze_lvalue->[analyze_lvalue]],returns_any_if_called->[returns_any_if_called],calculate_class_mro->[fail],replace_implicit_first_type->[replace_implicit_first_type],mark_block_unreachable->[accept],find_fixed_callable_return->[find_fixed_callable_return]]
|
Lookup a type by name or None.
|
The second argument shouldn't be `None` -- it should be an empty list if there are no arguments.
|
@@ -123,7 +123,7 @@ def _stdio_stream_as(src_fd, dst_fd, dst_sys_attribute, mode):
@contextmanager
-def stdio_as(stdout_fd, stderr_fd, stdin_fd):
+def stdio_as(stdout_fd, stderr_fd, stdin_fd, binary_mode=False):
"""Redirect sys.{stdout, stderr, stdin} to alternate file descriptors.
As a special case, if a given destination fd is `-1`, we will replace it with an open file handle
|
[open_zip->[InvalidZipPath],_copy_and_decode_env->[_os_decode],stdio_as->[_stdio_stream_as],environment_as->[setenv->[_os_encode],setenv],hermetic_environment_as->[_restore_env,environment_as,_copy_and_decode_env,_purge_env],_restore_env->[_os_encode]]
|
Redirect sys. stdout stderr and stdin to alternate file descriptors.
|
afaict, the `binary_mode=True` case here is never exercised? should there be some `if PY3` conditional logic somewhere consuming this for when pants is executed with a 3.x interpreter?
|
@@ -201,8 +201,8 @@ func (r *ReplicaChecker) checkBestReplacement(region *core.RegionInfo) *Operator
checkerCounter.WithLabelValues("replica_checker", "all_right")
return nil
}
- storeID, newScore := r.selectBestReplacement(region, oldPeer)
- if storeID == 0 {
+ newPeer, newScore := r.SelectBestReplacedPeerToAddReplica(region, oldPeer)
+ if newPeer == nil {
checkerCounter.WithLabelValues("replica_checker", "no_replacement_store")
return nil
}
|
[selectBestReplacement->[SelectBestStoreToAddReplica],checkBestReplacement->[selectBestReplacement,selectWorstPeer],checkOfflinePeer->[SelectBestPeerToAddReplica]]
|
checkBestReplacement checks if the region is best replacement for a peer. If it is it.
|
I was intentional to return `storeID` instead of `peer` to avoid alloc too many IDs during replacement check.
|
@@ -2,11 +2,14 @@
// This file is subject to the terms and conditions defined in
// file 'LICENSE.txt', which is part of this source code package.
+using System;
+
namespace Microsoft.Xna.Framework.Input
{
/// <summary>
/// A struct that represents the current button states for the controller.
/// </summary>
+ [Serializable]
public struct GamePadButtons
{
internal readonly Buttons _buttons;
|
[GamePadButtons->[B,_buttons,X,RightShoulder,RightStick,Pressed,A,BigButton,LeftShoulder,Back,Start,Y,Released,LeftStick]]
|
Creates a new object that represents the current button states for the controller. - Gets the value indicating if the button X is pressed or released.
|
I assume you're checking with XNA and these are cases where we missed having it serializable? Can you add `[DataContract]` to these as well if that is the case?
|
@@ -1,6 +1,10 @@
FactoryGirl.define do
Faker::Config.locale = 'en-US'
+ sequence :mobile do |n|
+ (999_000_0000 + n).to_s
+ end
+
factory :user do
confirmed_at Time.current
email { Faker::Internet.safe_email }
|
[email,factory,password,password_confirmation,trait,mobile_id,id,second_factor_confirmed_at,current,second_factor_ids,role,safe_email,mobile,confirmed_at,define,locale]
|
Create a new user object.
|
change to string to avoid rubocop violation or disable cop locally since it's phone number
|
@@ -182,6 +182,8 @@ class NewCppInfo(object):
current_values = result.get_init("requires", [])
current_values.extend(component.requires)
+ # We copy the properties from the root object, even if we have components
+ result._generator_properties = copy.copy(self._generator_properties)
# FIXME: What to do about sysroot?
else:
result = copy.copy(self.components[None])
|
[from_old_cppinfo->[merge,NewCppInfo,clear_none],fill_old_cppinfo->[copy,_NewComponent],NewCppInfo->[aggregated_components->[NewCppInfo,get_init,_NewComponent,get_sorted_components],copy->[copy,NewCppInfo,_NewComponent],merge->[merge_list],__init__->[_NewComponent]]]
|
Aggregates all the components as global values returning a new NewCppInfo object.
|
Could this be breaking? I think it shouldn't, the aggregated_components is used only by the new generators, and it makes sense that aggregating the components in a single cppinfo, keeps the root properties. The aggregation cannot be done for the component's properties because they cannot be aggregated but keeping the root one make sense. right?
|
@@ -54,10 +54,12 @@ export function connect() {
/**
* Closes connection.
*
+ * @param {boolean} requestFeedback - Whether or not to attempt showing a
+ * request for call feedback.
* @returns {Function}
*/
-export function disconnect() {
+export function disconnect(requestFeedback: boolean = false) {
// XXX For web based version we use conference hanging up logic from the old
// app.
- return () => APP.conference.hangup();
+ return () => APP.conference.hangup(requestFeedback);
}
|
[No CFG could be retrieved]
|
Closes a connection to the app.
|
This change supports the new hangup button in the new toolbar, which will call this action directly instead of emitting out through APP.UI.emitEvent. On hang up feedback should be requested.
|
@@ -26,6 +26,7 @@ class NodeTest(TestRunnerTaskMixin, NodeTask):
def prepare(cls, options, round_manager):
super().prepare(options, round_manager)
round_manager.require_data(NodePaths)
+ round_manager.require_data(NodePathsLocal)
@classmethod
def supports_passthru_args(cls):
|
[NodeTest->[_validate_target->[TargetDefinitionException,is_node_module,len],prepare->[super,require_data],_execute->[,get_passthru_args,reference,debug,get_data,get_package_manager,node_path,_get_test_targets,pushd,format,run_script,TaskError],_run_node_distribution_command->[spawn_and_wait],__init__->[super],_spawn->[output,run,SubprocessProcessHandler]]]
|
Check if the node supports passthru arguments.
|
A node_test shouldn't depend on a NodePathsLocal because there isn't any inflection points.
|
@@ -50,8 +50,8 @@
</span>
</div>
- <div class="pad" *ngFor="let thread of getThreads()">
- <h6>
+ <div id="modalThreadsMetrics" class="pad" *ngFor="let thread of getThreads()">
+ <h6 id="modalThreadsMetrics{{ thread.threadId }}">
<span class="badge" [ngClass]="getBadgeClass(thread.threadState)">{{ thread.threadState }}</span>
{{ thread.threadName }} (ID {{ thread.threadId }})
|
[No CFG could be retrieved]
|
Outputs a list of the top - level components of a thread - id sequence. Renders a single unique id in the network.
|
Here `id="modalThreadsMetrics"` is not best reference for `aria-describedby`. `id="modalThreadsMetrics{{ thread.threadId }}"` is unused. Maybe put `{{ thread.threadName }}` into `span` and give it id which includes `{{ thread.threadId }}` and in table use the same id in `aria-describedby` attribute?
|
@@ -45,6 +45,7 @@ public final class PlanJsonMapper {
mapper.enable(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES);
mapper.enable(DeserializationFeature.FAIL_ON_NULL_CREATOR_PROPERTIES);
mapper.enable(DeserializationFeature.FAIL_ON_INVALID_SUBTYPE);
+ mapper.setSerializationInclusion(Include.NON_EMPTY);
return mapper;
}
}
|
[PlanJsonMapper->[create->[ObjectMapper,registerModules,Jdk8Module,KsqlParserSerializationModule,KsqlTypesDeserializationModule,enable,JavaTimeModule,KsqlTypesSerializationModule]]]
|
Creates a new object mapper that will read all the objects from the KnowledgeBase.
|
This configures Jackson to exclude `null`, `Optional.empty()` and empty collections when serializing.
|
@@ -38,9 +38,10 @@ func resourceComputeInstanceGroupManager() *schema.Resource {
},
"version": &schema.Schema{
- Type: schema.TypeList,
- Optional: true,
- Computed: true,
+ Type: schema.TypeList,
+ Optional: true,
+ Computed: true,
+ Deprecated: "Use the instance_group_manager resource in the google-beta provider instead. See https://terraform.io/docs/providers/google/provider-versions.html for more details.",
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"name": &schema.Schema{
|
[Patch,SetPartial,SetNamedPorts,Delete,StringInSlice,Partial,Set,Error,GetOk,HasChange,RecreateInstances,Errorf,SetId,SetAutoHealingPolicies,Timeout,Do,Contains,Id,SetInstanceTemplate,Get,SetTargetPools,Printf,Resize,Sprintf,ListManagedInstances,WaitForState,Insert,IntBetween,Sleep]
|
google Imports imports a schema for the managed object Schema for the n - tuple header.
|
What do you think about prefixing these fields with a message that the field is beta? "This field is beta, use the instance_group_manager resource..." Reading the message as it stands suggests that the resource itself is beta to me.
|
@@ -1561,6 +1561,8 @@ void capi_free_key(CAPI_KEY * key)
CryptReleaseContext(key->hprov, 0);
if (key->pcert)
CertFreeCertificateContext(key->pcert);
+ if (key->id)
+ OPENSSL_free(key->id);
OPENSSL_free(key);
}
|
[No CFG could be retrieved]
|
Get a key from the CAPI. Frees all fields of a CAPI_CTX object.
|
NULL check not needed.
|
@@ -581,16 +581,13 @@ def plot_alignment(info, trans=None, subject=None, subjects_dir=None,
It corresponds to Freesurfer environment variable SUBJECTS_DIR.
surfaces : str | list
Surfaces to plot. Supported values:
-
* scalp: one of 'head', 'outer_skin' (alias for 'head'),
'head-dense', or 'seghead' (alias for 'head-dense')
* skull: 'outer_skull', 'inner_skull', 'brain' (alias for
'inner_skull')
* brain: one of 'pial', 'white', 'inflated', or 'brain'
(alias for 'pial').
-
Defaults to 'head'.
-
.. note:: For single layer BEMs it is recommended to use 'brain'.
coord_frame : str
Coordinate frame to use, 'head', 'meg', or 'mri'.
|
[_plot_mpl_stc->[_handle_time,_limits_to_control_points,_smooth_plot],plot_vector_source_estimates->[_handle_time,_limits_to_control_points,_get_ps_kwargs],plot_source_estimates->[_plot_mpl_stc,_limits_to_control_points,_handle_time],_plot_dipole->[_pivot_kwargs],_sensor_shape->[_make_tris_fan],plot_alignment->[_fiducial_coords,_create_mesh_surf],plot_volume_source_estimates->[plot_and_correct->[_glass_brain_crosshairs],_onclick->[_update_timeslice,_get_cut_coords_glass_brain,_get_cut_coords_stat_map,_cut_coords_to_idx],_limits_to_control_points,plot_and_correct],_dipole_changed->[_plot_dipole]]
|
Plots the head sensor and source space alignment in 3D. Plot a specific . Returns a figure object that represents a single non - empty . Computes the n - ary non - zero value of a single - ary . Get a list of all possible components of a .
|
@GuillaumeFavelier can you avoid such cosmetic changes? it makes diff longer to read for little gain. thx
|
@@ -75,6 +75,9 @@ class CentOSConfigurator(configurator.ApacheConfigurator):
# Finish with actual config check to see if systemctl restart helped
super(CentOSConfigurator, self).config_test()
+ def _pick_apache_config(self):
+ return configurator.find_ssl_apache_conf("centos")
+
def _prepare_options(self):
"""
Override the options dictionary initialization in order to support
|
[CentOSParser->[not_modssl_ifmodule->[parts,lower,len,rpartition,get_all_args],__init__->[super],update_runtime_variables->[super,parse_sysconfig_var],parse_sysconfig_var->[parse_define_file]],CentOSConfigurator->[_deploy_cert->[super,_deploy_loadmodule_ssl_if_needed],_deploy_loadmodule_ssl_if_needed->[get_ifmod,not_modssl_ifmodule,create_ifmod,split,lower,get_aug_path,append,MisconfigurationError,add_dir,remove,find_dir,rpartition,get_all_args],get_parser->[option,CentOSParser],_try_restart_fedora->[str,MisconfigurationError,run_script,super],_prepare_options->[option,super],config_test->[get_os_info,super,os_info,_try_restart_fedora],dict,resource_filename],getLogger,provider]
|
Tries to restart Fedora using systemctl.
|
We should probably fork the CentOS include into two as as well - the newest (CentOS 8) has Apache version 2.4.35 that supports TLS session tickets. We could probably just call the ancestor class with `centos-old` and `centos-current` prefixes.
|
End of preview. Expand
in Data Studio
README.md exists but content is empty.
- Downloads last month
- 8