2024-02-21 22:09:32 +01:00
|
|
|
# Pytorch
|
|
|
|
|
|
|
|
## Run a docker container
|
|
|
|
|
|
|
|
This container runs in the background
|
|
|
|
|
|
|
|
```bash
|
|
|
|
docker run -d -it --gpus all --name pytorch-container pytorch/pytorch:latest
|
|
|
|
```
|
|
|
|
|
|
|
|
## Connect to running container
|
|
|
|
|
|
|
|
```bash
|
|
|
|
docker exec -it <container-name> bash
|
|
|
|
docker exec -it <container-name> bash -c "cat <file>"
|
|
|
|
```
|
|
|
|
|
|
|
|
## Stop and start an existing container
|
|
|
|
|
|
|
|
```bash
|
|
|
|
docker stop <container-name>
|
|
|
|
docker start <container-name>
|
|
|
|
```
|
|
|
|
|
|
|
|
## Example code
|
2022-12-29 09:37:12 +01:00
|
|
|
|
|
|
|
```python
|
|
|
|
# Load libraries
|
|
|
|
import torch
|
|
|
|
import torch.nn as nn
|
|
|
|
from res.plot_lib import set_default, show_scatterplot, plot_bases
|
|
|
|
from matplotlib.pyplot import plot, title, axis
|
|
|
|
```
|
|
|
|
|
|
|
|
```python
|
|
|
|
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
|
|
|
|
```
|