
Not an issue - suggestion for a simpler example for the README

Opened by johndpope on Jun 08 '24

import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import DataLoader, Dataset
from flops_profiler import FlopsProfiler

# Define your model class
class Model(nn.Module):
    def __init__(self):
        super().__init__()
        # Define the layers of the model
        self.layer = nn.Linear(10, 1)

    def forward(self, x):
        return self.layer(x)

# Sample dataset and dataloader
class SampleDataset(Dataset):
    def __init__(self):
        self.data = torch.randn(100, 10)
        self.labels = torch.randn(100, 1)

    def __len__(self):
        return len(self.data)

    def __getitem__(self, idx):
        return self.data[idx], self.labels[idx]

data_loader = DataLoader(SampleDataset(), batch_size=10, shuffle=True)

# Initialize model, loss, and optimizer
model = Model()
criterion = nn.MSELoss()
optimizer = optim.SGD(model.parameters(), lr=0.01)

# Initialize profiler
prof = FlopsProfiler(model)
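# start_profile() adds counting hooks to the model, stop_profile() freezes the
# totals, and end_profile() removes the hooks once the results have been read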
profile_step = 5  # The training step to profile; a step past 0 lets earlier iterations serve as warm-up

for step, (inputs, labels) in enumerate(data_loader):
    # Start profiling at the specified step
    if step == profile_step:
        prof.start_profile()

    # Forward pass
    outputs = model(inputs)
    loss = criterion(outputs, labels)

    # Stop counting after the forward pass, then collect and print the results
    if step == profile_step:
        prof.stop_profile()
        flops = prof.get_total_flops(as_string=True)
        macs = prof.get_total_macs(as_string=True)
        params = prof.get_total_params(as_string=True)
        prof.print_model_profile(profile_step=profile_step)
        prof.end_profile()
        print(f"Step {step}: FLOPS - {flops}, MACs - {macs}, Params - {params}")

    # Backward pass and optimization
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
