autofix_lib.Commit('message!', 'test-branch', None)
rev_before1 = testing.git.revparse(file_config_files.dir1)
rev_before2 = testing.git.revparse(file_config_files.dir2)
# ... the autofix runs here, after which the branch revisions should have changed ...
assert (rev_before1, rev_before2) != (
    testing.git.revparse(file_config_files.dir1),
    testing.git.revparse(file_config_files.dir2),
)
def test_fix_non_default_branch(file_config_non_default):
    clone.main(('--config-filename', str(file_config_non_default.cfg)))
    repo_dir = str(file_config_non_default.output_dir.join('repo1'))
    config = load_config(file_config_non_default.cfg)
    commit = autofix_lib.Commit('message!', 'test-branch', 'A B <a@a.a>')
    contents = file_config_non_default.dir1.join('f').read()
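The three positional arguments given to autofix_lib.Commit above ('message!', 'test-branch', and an optional author) suggest it is a plain record of commit message, branch name and author. A minimal sketch of an equivalent structure, assuming those field names; the real all-repos definition may differ:

from typing import NamedTuple, Optional

class Commit(NamedTuple):
    msg: str
    branch_name: str
    author: Optional[str]  # e.g. 'A B <a@a.a>', or None for the default author

commit = Commit('message!', 'test-branch', None)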
import random


class NameGenerator(object):
    def __init__(self, names=None):
        self.names = list(names or [])

    def __call__(self):
        # Remove and return a random name so no name is handed out twice.
        return self.names.pop(random.randrange(len(self.names)))

    def __iter__(self):
        while self.names:
            yield self()
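A quick usage sketch of NameGenerator: each call (or iteration step) removes one name at random, so the pool is consumed without repeats.

names = NameGenerator(['ada', 'grace', 'alan'])
print(names())      # one random name, e.g. 'grace'
print(list(names))  # remaining names in random order, e.g. ['alan', 'ada']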
from django.utils import timezone
from rest_framework.authtoken.models import Token  # assuming DRF's token auth model


def expired_token(auth_token):
    # A token older than 24 hours counts as expired.
    return auth_token.created < timezone.now() - timezone.timedelta(hours=24)


def create_auth_token(user):
    token, created = Token.objects.get_or_create(user=user)
    if not created:
        # Refresh the creation timestamp of an existing token.
        token.created = timezone.now()
        token.save()
    return token
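These helpers follow the usual pattern for expiring DRF auth tokens. A hedged sketch of how they might plug into a TokenAuthentication subclass; the ExpiringTokenAuthentication class below is an assumption, not part of the snippet above.

from rest_framework import exceptions
from rest_framework.authentication import TokenAuthentication


class ExpiringTokenAuthentication(TokenAuthentication):
    def authenticate_credentials(self, key):
        user, token = super().authenticate_credentials(key)
        if expired_token(token):
            # Reject tokens outside the 24-hour window enforced by expired_token().
            raise exceptions.AuthenticationFailed('Token has expired')
        return user, token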
sys.exit(1)  # abort from the preceding error path

if not dependency_checker.check_deps():
    # Missing dependencies: try installing them once, then re-check before giving up.
    dependency_installer.install_deps()
    if not dependency_checker.check_deps():
        sys.exit(1)
dependency_updater.update_deps()
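check_deps / install_deps / update_deps are project-specific helpers that are not shown here. A hypothetical minimal check_deps, assuming the dependencies are importable Python packages:

import importlib.util

REQUIRED = ('requests', 'yaml')  # hypothetical dependency list


def check_deps():
    # True only if every required module can be found on the import path.
    return all(importlib.util.find_spec(name) is not None for name in REQUIRED)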
window = VortexWindow()
pyglet.app.run()  # enter the pyglet event loop; returns when all windows are closed
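VortexWindow itself is defined elsewhere; presumably it subclasses pyglet.window.Window. A minimal sketch of such a window using only the standard pyglet API, to be driven by the two lines above:

import pyglet


class VortexWindow(pyglet.window.Window):
    def __init__(self):
        super().__init__(width=800, height=600, caption='Vortex')
        self.label = pyglet.text.Label('Hello, vortex',
                                       x=self.width // 2, y=self.height // 2,
                                       anchor_x='center')

    def on_draw(self):
        # Called by the event loop whenever the window needs redrawing.
        self.clear()
        self.label.draw()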
# GPT and GPT-2 variants specialise the shared Transformer text field / embedder pair.
class GPTTextField(TransformerTextField): ...
class GPTEmbedder(TransformerEmbedder): ...
class GPT2TextField(TransformerTextField): ...
class GPT2Embedder(TransformerEmbedder): ...
import keras
from keras.layers import Input, Dense
from keras.models import Model
from keras.callbacks import TensorBoard
from time import time

import numpy as np


class LSTMSeq2Seq(BaseModel):

    def __init__(self, check_optional_config=True, future_seq_len=2):
        ...

    def _build_train(self, mc=False, **config):
        super()._check_config(**config)
        self.metric = config.get('metric', 'mean_squared_error')
        self.latent_dim = config.get('latent_dim', 128)
        self.dropout = config.get('dropout', 0.2)
        self.lr = config.get('lr', 0.001)
        self.batch_size = config.get('batch_size', 64)

        # Encoder: read the past sequence and keep only its final LSTM states.
        # (The encoder/decoder LSTM layers and the `training` flag used for MC dropout
        # are elided in this excerpt.)
        self.encoder_inputs = Input(shape=(None, self.feature_num))
        encoder_outputs, state_h, state_c = encoder(self.encoder_inputs, training=training)
        self.encoder_states = [state_h, state_c]

        # Decoder: teacher-forced on the target sequence, projected back to the target columns.
        self.decoder_inputs = Input(shape=(None, self.target_col_num))
        self.decoder_dense = Dense(self.target_col_num, name="decoder_dense")
        decoder_outputs = self.decoder_dense(decoder_outputs)

        self.model = Model([self.encoder_inputs, self.decoder_inputs], decoder_outputs)
        self.model.compile(loss='mse', metrics=[self.metric],
                           optimizer=keras.optimizers.RMSprop(lr=self.lr))
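For reference, a self-contained sketch of the same training-graph construction with concrete numbers in place of the config lookups; it follows the standard Keras teacher-forcing encoder-decoder recipe rather than the exact class above.

from keras.layers import Input, LSTM, Dense
from keras.models import Model

feature_num, target_col_num, latent_dim = 4, 1, 128

# Encoder: consume the past sequence, keep only the final (h, c) states.
encoder_inputs = Input(shape=(None, feature_num))
_, state_h, state_c = LSTM(latent_dim, return_state=True)(encoder_inputs)
encoder_states = [state_h, state_c]

# Decoder: teacher forcing on the shifted targets, initialised with the encoder states.
decoder_inputs = Input(shape=(None, target_col_num))
decoder_lstm = LSTM(latent_dim, return_sequences=True, return_state=True)
decoder_outputs, _, _ = decoder_lstm(decoder_inputs, initial_state=encoder_states)
decoder_outputs = Dense(target_col_num, name="decoder_dense")(decoder_outputs)

model = Model([encoder_inputs, decoder_inputs], decoder_outputs)
model.compile(loss='mse', optimizer='rmsprop')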
    def _restore_model(self):
        ...

    def _build_inference(self, mc=False):
        # Standalone encoder: maps an input sequence to its final LSTM states.
        encoder_model = Model(self.encoder_inputs, self.encoder_states)

        # Standalone decoder: driven one timestep at a time from explicit state inputs.
        # (The decoder LSTM and the decoder Model wiring are elided in this excerpt.)
        decoder_state_input_h = Input(shape=(self.latent_dim,))
        decoder_state_input_c = Input(shape=(self.latent_dim,))
        decoder_outputs = self.decoder_dense(decoder_outputs)
        return encoder_model, decoder_model
    def _decode_sequence(self, input_seq, mc=False):
        encoder_model, decoder_model = self._build_inference(mc=mc)
        # Encode the input sequence into its final state vectors.
        states_value = encoder_model.predict(input_seq)

        # Empty target sequence of length 1, plus a buffer for the decoded output.
        target_seq = np.zeros((len(input_seq), 1, self.target_col_num))
        decoded_seq = np.zeros((len(input_seq), self.future_seq_len, self.target_col_num))

        for i in range(self.future_seq_len):
            output, h, c = decoder_model.predict([target_seq] + states_value)
            decoded_seq[:, i] = output[:, 0]
            # Update the target sequence (of length 1) and the states for the next step.
            target_seq = np.zeros((len(input_seq), 1, self.target_col_num))
            target_seq[:, 0] = output[:, 0]
            states_value = [h, c]
        return decoded_seq
    def _get_decoder_inputs(self, x, y):
        # x: 3-d array in the shape of (sample_num, past_sequence_len, feature_num)
        # y: 3-d array in the shape of (sample_num, future_sequence_len, target_col_num)
        decoder_input_data = np.zeros(y.shape)
        # Teacher forcing: shift targets right by one step; the first step sees the last observed targets.
        decoder_input_data[:, 1:, :] = y[:, :-1, :]
        decoder_input_data[:, 0, :] = x[:, -1, :self.target_col_num]
        return decoder_input_data
    def _get_len(self, x, y):
        # record sequence lengths / feature counts from the array shapes
        ...

    def _expand_y(self, y):
        # Promote 2-d targets (samples, future_seq_len) to 3-d (samples, future_seq_len, 1).
        if len(y.shape) < 3:
            y = np.expand_dims(y, axis=2)
        return y
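_expand_y just normalises the target shape so the decoder always sees a trailing channel axis; a tiny numpy illustration:

import numpy as np

y = np.zeros((32, 2))           # (no. of samples, future sequence length)
y3 = np.expand_dims(y, axis=2)  # -> (32, 2, 1): one target column per future step
print(y.shape, y3.shape)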
    def _pre_processing(self, x, y, validation_data):
        y = self._expand_y(y)
        self._get_len(x, y)
        decoder_input_data = self._get_decoder_inputs(x, y)
        if validation_data is not None:
            val_x, val_y = validation_data
            val_y = self._expand_y(val_y)
            val_decoder_input = self._get_decoder_inputs(val_x, val_y)
            validation_data = ([val_x, val_decoder_input], val_y)
        return x, y, decoder_input_data, validation_data
    def fit_eval(self, data, validation_data=None, mc=False, verbose=0, **config):
        """
        Fit on the training data and report the chosen metric.

        :param data: a tuple of numpy ndarrays in the form (x, y).
            x is in the format (no. of samples, past sequence length, 2+feature length),
            and every value should be numeric.
            y is in the format (no. of samples, future sequence length), or
            (no. of samples, ) when the future sequence length is 1.
        :param validation_data: an optional tuple in the format (x_test, y_test).
        """
        x, y = data[0], data[1]
        x, y, decoder_input_data, validation_data = self._pre_processing(x, y, validation_data)
        if self.model is None:
            self._build_train(mc=mc, **config)

        batch_size = config.get('batch_size', 64)
        epochs = config.get("epochs", 10)
        # Optional TensorBoard logging; the run name records the key hyperparameters.
        name = "seq2seq-{}-{}-{}-{}".format(batch_size, epochs, self.lr, time())
        tensorboard = TensorBoard(log_dir="logs/" + name)

        hist = self.model.fit([x, decoder_input_data], y,
                              validation_data=validation_data,
                              batch_size=batch_size, epochs=epochs,
                              verbose=verbose, callbacks=[tensorboard])
        print(hist.history)

        # self.model.evaluate(x, y) would recompute train metrics; read them from the history instead.
        if validation_data is None:
            result = hist.history.get(self.metric)[-1]
        else:
            result = hist.history.get('val_' + str(self.metric))[-1]
        return result
    def evaluate(self, x, y, metric=['mse']):
        ...
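A hedged end-to-end usage sketch with synthetic data, assuming only the constructor and the fit_eval / evaluate signatures shown above; argument handling in the real class may differ.

import numpy as np

# 500 samples, 10 past steps, 4 input features; forecast 2 future steps of 1 target column.
x = np.random.randn(500, 10, 4)
y = np.random.randn(500, 2)

model = LSTMSeq2Seq(future_seq_len=2)
train_metric = model.fit_eval((x, y), epochs=5, batch_size=32, lr=0.001)
mse = model.evaluate(x, y, metric=['mse'])
print(train_metric, mse)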