Mapped the HIV Illumina reads with bwa and attempting to analyse the sam file per instructions. On 64 (AMD) cores and 504 GB RAM;
BiasGrad requires tensor size <= int32 max
NN) to use the following CPU instructions in performance-critical operations: FMA To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags. /home/scott/miniconda3/lib/python3.8/site-packages/keras/optimizer_v2/adam.py:105: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead.
super(Adam, self).__init__(name, **kwargs)
Traceback (most recent call last):
File "CAECseq_viral.py", line 155, in <module>
pretrain_history = pretrain.fit(x = SNVonehot,
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/utils/traceback_utils.py", line 67, in error_handler
raise e.with_traceback(filtered_tb) from None
File "/home/scott/miniconda3/lib/python3.8/site-packages/tensorflow/python/eager/execute.py", line 54, in quick_execute
tensors = pywrap_tfe.TFE_Py_Execute(ctx._handle, device_name, op_name,
tensorflow.python.framework.errors_impl.InvalidArgumentError: Graph execution error:
Detected at node 'gradient_tape/model_1/convT_1/BiasAdd/BiasAddGrad' defined at (most recent call last):
File "CAECseq_viral.py", line 155, in <module>
pretrain_history = pretrain.fit(x = SNVonehot,
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/utils/traceback_utils.py", line 64, in error_handler
return fn(*args, **kwargs)
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/engine/training.py", line 1384, in fit
tmp_logs = self.train_function(iterator)
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/engine/training.py", line 1021, in train_function
return step_function(self, iterator)
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/engine/training.py", line 1010, in step_function
outputs = model.distribute_strategy.run(run_step, args=(data,))
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/engine/training.py", line 1000, in run_step
outputs = model.train_step(data)
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/engine/training.py", line 863, in train_step
self.optimizer.minimize(loss, self.trainable_variables, tape=tape)
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/optimizer_v2/optimizer_v2.py", line 530, in minimize
grads_and_vars = self._compute_gradients(
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/optimizer_v2/optimizer_v2.py", line 583, in _compute_gradients
grads_and_vars = self._get_gradients(tape, loss, var_list, grad_loss)
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/optimizer_v2/optimizer_v2.py", line 464, in _get_gradients
grads = tape.gradient(loss, var_list, grad_loss)
Node: 'gradient_tape/model_1/convT_1/BiasAdd/BiasAddGrad'
BiasGrad requires tensor size <= int32 max
[[{{node gradient_tape/model_1/convT_1/BiasAdd/BiasAddGrad}}]] [Op:__inference_train_function_1903]
real 71m48.737s
user 426m40.778s
sys 198m41.867s
config.txt
`
Mapped the HIV Illumina reads with bwa and attempting to analyse the sam file per instructions. On 64 (AMD) cores and 504 GB RAM;
BiasGrad requires tensor size <= int32 max
NN) to use the following CPU instructions in performance-critical operations: FMA To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags. /home/scott/miniconda3/lib/python3.8/site-packages/keras/optimizer_v2/adam.py:105: UserWarning: The `lr` argument is deprecated, use `learning_rate` instead.
super(Adam, self).__init__(name, **kwargs)
Traceback (most recent call last):
File "CAECseq_viral.py", line 155, in <module>
pretrain_history = pretrain.fit(x = SNVonehot,
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/utils/traceback_utils.py", line 67, in error_handler
raise e.with_traceback(filtered_tb) from None
File "/home/scott/miniconda3/lib/python3.8/site-packages/tensorflow/python/eager/execute.py", line 54, in quick_execute
tensors = pywrap_tfe.TFE_Py_Execute(ctx._handle, device_name, op_name,
tensorflow.python.framework.errors_impl.InvalidArgumentError: Graph execution error:
Detected at node 'gradient_tape/model_1/convT_1/BiasAdd/BiasAddGrad' defined at (most recent call last):
File "CAECseq_viral.py", line 155, in <module>
pretrain_history = pretrain.fit(x = SNVonehot,
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/utils/traceback_utils.py", line 64, in error_handler
return fn(*args, **kwargs)
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/engine/training.py", line 1384, in fit
tmp_logs = self.train_function(iterator)
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/engine/training.py", line 1021, in train_function
return step_function(self, iterator)
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/engine/training.py", line 1010, in step_function
outputs = model.distribute_strategy.run(run_step, args=(data,))
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/engine/training.py", line 1000, in run_step
outputs = model.train_step(data)
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/engine/training.py", line 863, in train_step
self.optimizer.minimize(loss, self.trainable_variables, tape=tape)
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/optimizer_v2/optimizer_v2.py", line 530, in minimize
grads_and_vars = self._compute_gradients(
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/optimizer_v2/optimizer_v2.py", line 583, in _compute_gradients
grads_and_vars = self._get_gradients(tape, loss, var_list, grad_loss)
File "/home/scott/miniconda3/lib/python3.8/site-packages/keras/optimizer_v2/optimizer_v2.py", line 464, in _get_gradients
grads = tape.gradient(loss, var_list, grad_loss)
Node: 'gradient_tape/model_1/convT_1/BiasAdd/BiasAddGrad'
BiasGrad requires tensor size <= int32 max
[[{{node gradient_tape/model_1/convT_1/BiasAdd/BiasAddGrad}}]] [Op:__inference_train_function_1903]
real 71m48.737s
user 426m40.778s
sys 198m41.867s
config.txt
`