10 examples of 'how to check if tensorflow is using gpu' in Python

Every line of 'how to check if tensorflow is using gpu' code snippets is scanned for vulnerabilities by our powerful machine learning engine that combs millions of open source libraries, ensuring your Python code is secure.
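If you just need the quickest check on a modern TensorFlow 2.x install, a common one-liner (not part of the scanned snippets below, shown here as a minimal sketch assuming tensorflow >= 2.1) is tf.config.list_physical_devices('GPU'):

import tensorflow as tf

gpus = tf.config.list_physical_devices('GPU')
print("GPUs visible to TensorFlow:", gpus)
print("Using GPU:", len(gpus) > 0)

The snippets that follow show the same check done in a few different ways, plus related device checks from other frameworks.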

import tensorflow.compat.v1 as tfv1

def gpu_available_in_session():
    # Inspect the devices attached to the current default (TF1-style) session.
    sess = tfv1.get_default_session()
    for dev in sess.list_devices():
        if dev.device_type.lower() == 'gpu':
            return True
    return False
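A quick way to exercise this helper (a sketch, assuming TensorFlow's v1 compatibility API): entering a tfv1.Session() context also installs it as the default session, so the check can run inside it.

with tfv1.Session() as sess:
    print("GPU available in session:", gpu_available_in_session())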
def is_gpu_available():
    from tensorflow.python.client import device_lib
    # list_local_devices() initializes every device visible to TensorFlow.
    local_device_protos = device_lib.list_local_devices()
    gpu_list = [x.name for x in local_device_protos if x.device_type == 'GPU']
    if len(gpu_list) > 0:
        print("TensorFlow GPU:", gpu_list)
        return True
    else:
        return False
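A hypothetical follow-up use: pick a device string based on the check and place an op on it explicitly (assumes a TensorFlow 2.x install for the eager-mode calls).

import tensorflow as tf

device_name = '/GPU:0' if is_gpu_available() else '/CPU:0'
with tf.device(device_name):
    result = tf.reduce_sum(tf.random.normal((1000, 1000)))
print(device_name, result)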
import tensorflow as tf

def check_cuda_support():
    """Check whether TensorFlow was built with CUDA support."""
    return tf.test.is_built_with_cuda()
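Note that a CUDA-enabled build does not guarantee a GPU is actually present at runtime; a sketch that combines the build check with a device check (assuming TensorFlow 2.x for tf.config):

def cuda_build_and_gpu_present():
    # True only when TF was built with CUDA *and* at least one GPU is visible.
    return tf.test.is_built_with_cuda() and len(tf.config.list_physical_devices('GPU')) > 0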
def linux_with_gpu():
    """Return True if the machine is running Linux and has a GPU."""
    has_gpu = is_available()
    return is_linux() and has_gpu
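is_linux() and is_available() are helpers defined elsewhere in the source project; a minimal sketch of what such helpers might look like, assuming the standard library for OS detection and TensorFlow 2.x for the GPU check:

import platform
import tensorflow as tf

def is_linux():
    # Hypothetical helper: True when the OS reports itself as Linux.
    return platform.system().lower() == 'linux'

def is_available():
    # Hypothetical helper: True when TensorFlow can see at least one GPU.
    return len(tf.config.list_physical_devices('GPU')) > 0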
def gpu_no_of_var(var):
    """Return the CUDA device index of a torch tensor or module.

    Args:
        var: a torch tensor or a torch.nn.Module

    Returns:
        The CUDA device index that `var` lives on, or False if it is on the CPU.
    """
    try:
        # Modules expose their tensors through .parameters().
        is_cuda = next(var.parameters()).is_cuda
    except AttributeError:
        # Plain tensors have .is_cuda directly.
        is_cuda = var.is_cuda

    if is_cuda:
        try:
            return next(var.parameters()).get_device()
        except AttributeError:
            return var.get_device()
    return False
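This snippet is PyTorch rather than TensorFlow, but it shows the same idea for tensors and modules; a quick usage sketch (assumes torch is installed):

import torch
import torch.nn as nn

t = torch.randn(4)           # a CPU tensor
print(gpu_no_of_var(t))      # False

if torch.cuda.is_available():
    m = nn.Linear(4, 2).cuda()
    print(gpu_no_of_var(m))  # prints the CUDA device index, e.g. 0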
def test_tf_support_gpu_instances(sagemaker_session, tf_version):
    # ml.g2 and ml.p2 are GPU instance types, so the estimator should
    # resolve to the GPU training image.
    tf = _build_tf(sagemaker_session, tf_version, train_instance_type="ml.g2.2xlarge")

    assert tf.train_image() == _get_full_gpu_image_uri(tf_version)

    tf = _build_tf(sagemaker_session, tf_version, train_instance_type="ml.p2.2xlarge")

    assert tf.train_image() == _get_full_gpu_image_uri(tf_version)
import os
import mxnet as mx

def mxnet_prefer_gpu():
    """Return a GPU context if one is available, otherwise a CPU context.

    Returns
    -------
    context : Context
        The preferred context (GPU when available, CPU otherwise).
    """
    gpu = int(os.environ.get('MXNET_GPU', default=0))
    if gpu in mx.test_utils.list_gpus():
        return mx.gpu(gpu)
    return mx.cpu()
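A hypothetical usage, allocating an NDArray on whichever context the helper prefers:

ctx = mxnet_prefer_gpu()
x = mx.nd.ones((2, 3), ctx=ctx)
print(x.context)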
@classmethod
def supports_device(cls, device):
    return common_supports_device(device)
from tensorflow.python.client import device_lib

def get_gpu_count():
    local_device_protos = device_lib.list_local_devices()
    return len([x.name for x in local_device_protos if x.device_type == 'GPU'])
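On TensorFlow 2.x the same count can be obtained without reaching into the internal device_lib module; a sketch assuming tensorflow >= 2.1:

import tensorflow as tf

def get_gpu_count_tf2():
    # Hypothetical TF2 variant of the helper above.
    return len(tf.config.list_physical_devices('GPU'))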
def gpu_count():
    return len(get_available_gpus())
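get_available_gpus() is defined elsewhere in that project; a common shape for such a helper, assuming the device_lib approach used in the snippets above:

from tensorflow.python.client import device_lib

def get_available_gpus():
    # Hypothetical helper: names of every GPU device TensorFlow can see.
    local_device_protos = device_lib.list_local_devices()
    return [x.name for x in local_device_protos if x.device_type == 'GPU']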
