Wandb Basics

# Install the library
!pip install wandb
# Login to your account
!wandb login
wandb: You can find your API key in your browser here: https://wandb.ai/authorize
wandb: Paste an API key from your profile and hit enter: 
wandb: Appending key for api.wandb.ai to your netrc file: /root/.netrc
# Initialize wandb
import wandb
wandb.init(project="my-first-project")
wandb: Currently logged in as: sharan19 (use `wandb login --relogin` to force relogin)
wandb: Tracking run with wandb version 0.10.2
wandb: Run data is saved locally in wandb/run-20200924_005634-2y2hgq77
wandb: Syncing run devoted-vortex-1

Run(2y2hgq77)

# Setting Hyper-parameters

wandb.config.dropout = 0.2
wandb.config.hidden_layer_size = 128
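
If you prefer to keep all hyper-parameters in one place, the config can also be updated from a dictionary. A minimal sketch (the values here are just illustrative):

# Update the run config from a dict in a single call
config_defaults = {"dropout": 0.2, "hidden_layer_size": 128, "learn_rate": 0.01}
wandb.config.update(config_defaults)
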
# Log metrics - you can also log images, graphs, and histograms
def my_train_loop():
    for epoch in range(10):
        loss = 0  # placeholder - compute your real training loss here
        wandb.log({'epoch': epoch, 'loss': loss})
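
Histograms go through the same wandb.log call. A minimal sketch, assuming NumPy is installed and a run is active (the gradient values are random stand-ins):

import numpy as np

# Log a histogram of (stand-in) gradient values alongside a scalar metric
grads = np.random.randn(1000)
wandb.log({"grad_hist": wandb.Histogram(grads), "loss": 0.42})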

# Logging Images
wandb.log({"examples": [wandb.Image(numpy_array_or_pil, caption="Label")]})

For logging other types of media, visit:

https://docs.wandb.com/library/log

# Saving Files
wandb.save("mymodel.h5")
# Save a model file from the current directory
wandb.save('model.h5')

# Save all files that currently exist containing the substring "ckpt"
wandb.save('../logs/*ckpt*')

# Save any files starting with "checkpoint" as they're written to
import os
wandb.save(os.path.join(wandb.run.dir, "checkpoint*"))
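
A common pattern, sketched here assuming a compiled Keras model object, is to write files into wandb.run.dir so they are uploaded when the run finishes:

import os

# Files written to the run directory are synced to W&B automatically
model.save(os.path.join(wandb.run.dir, "model.h5"))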

Keras Integration

import wandb
from wandb.keras import WandbCallback
wandb.init(config={"hyper": "parameter"})

# Magic - WandbCallback automatically logs metrics from model.fit to the run

model.fit(X_train, y_train, validation_data=(X_test, y_test),
          callbacks=[WandbCallback()])

Scikit-Learn Integration

# Visualize single plot
wandb.sklearn.plot_confusion_matrix(y_true, y_pred, labels)

# Visualize all classifier plots
wandb.sklearn.plot_classifier(clf, X_train, X_test, y_train, y_test, y_pred, y_probas,
                              labels, model_name='SVC', feature_names=None)

# All regression plots
wandb.sklearn.plot_regressor(reg, X_train, X_test, y_train, y_test, model_name='Ridge')

# All clustering plots
wandb.sklearn.plot_clusterer(kmeans, X_train, cluster_labels, labels=None, model_name='KMeans')
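
Below is a minimal end-to-end sketch for the confusion matrix plot; the iris dataset, the SVC model, and the project name are just illustrative choices:

import wandb
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

wandb.init(project="sklearn-demo")  # hypothetical project name

# Fit a small classifier and log its confusion matrix
X, y = load_iris(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
clf = SVC(probability=True).fit(X_train, y_train)
y_pred = clf.predict(X_test)

labels = ["setosa", "versicolor", "virginica"]
wandb.sklearn.plot_confusion_matrix(y_test, y_pred, labels)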

Full Keras Example

import wandb
from wandb.keras import WandbCallback

from keras.datasets import fashion_mnist
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Dropout, Dense, Flatten
from keras.utils import np_utils
from keras.optimizers import SGD
from keras.callbacks import TensorBoard

# Default values for hyper-parameters
defaults=dict(
    dropout = 0.2,
    hidden_layer_size = 32,
    layer_1_size = 32,
    learn_rate = 0.01,
    decay = 1e-6,
    momentum = 0.9,
    epochs = 5,
    )

# Initialize a new wandb run and pass in the config object
# wandb.init(anonymous='allow', project="kaggle", config=defaults)

wandb.init(project="visualize-models", config=defaults, name="neural_network")
config = wandb.config

(X_train, y_train), (X_test, y_test) = fashion_mnist.load_data()
labels=["T-shirt/top","Trouser","Pullover","Dress","Coat",
        "Sandal","Shirt","Sneaker","Bag","Ankle boot"]

img_width=28
img_height=28

X_train = X_train.astype('float32')
X_train /= 255.
X_test = X_test.astype('float32')
X_test /= 255.

# Reshape input data (and keep a 10,000-sample subset to speed up training)
X_train = X_train.reshape(X_train.shape[0], img_width, img_height, 1)[:10000]
X_test = X_test.reshape(X_test.shape[0], img_width, img_height, 1)[:10000]

# one hot encode outputs
y_train = np_utils.to_categorical(y_train)[:10000]
y_test = np_utils.to_categorical(y_test)[:10000]
num_classes = y_test.shape[1]

# build model
model = Sequential()
model.add(Conv2D(config.layer_1_size, (5, 5), activation='relu',
                 input_shape=(img_width, img_height, 1)))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(config.dropout))
model.add(Flatten())
model.add(Dense(num_classes, activation='softmax'))

sgd = SGD(lr=config.learn_rate, decay=config.decay, momentum=config.momentum, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
wandb: Waiting for W&B process to finish, PID 211
wandb: Program ended successfully.

wandb: Find user logs for this run at: wandb/run-20200924_005634-2y2hgq77/logs/debug.log
wandb: Find internal logs for this run at: wandb/run-20200924_005634-2y2hgq77/logs/debug-internal.log
wandb: Synced 5 W&B file(s), 0 media file(s), 0 artifact file(s) and 0 other file(s)
wandb: 
wandb: Synced devoted-vortex-1: https://wandb.ai/sharan19/my-first-project/runs/2y2hgq77
wandb: Tracking run with wandb version 0.10.2
wandb: Run data is saved locally in wandb/run-20200924_013038-erckq5ar
wandb: Syncing run neural_network

%%wandb
# Add WandbCallback() to the fit function
model.fit(X_train, y_train,  validation_data=(X_test, y_test), epochs=config.epochs,
    callbacks=[WandbCallback(data_type="image", labels=labels)])
Epoch 1/5
313/313 [==============================] - 7s 23ms/step - loss: 0.7694 - accuracy: 0.7219 - val_loss: 0.5558 - val_accuracy: 0.8071
Epoch 2/5
313/313 [==============================] - 8s 24ms/step - loss: 0.4964 - accuracy: 0.8257 - val_loss: 0.4829 - val_accuracy: 0.8282
Epoch 3/5
313/313 [==============================] - 8s 24ms/step - loss: 0.4345 - accuracy: 0.8500 - val_loss: 0.4887 - val_accuracy: 0.8252
Epoch 4/5
313/313 [==============================] - 8s 24ms/step - loss: 0.3952 - accuracy: 0.8610 - val_loss: 0.4090 - val_accuracy: 0.8578
Epoch 5/5
313/313 [==============================] - 8s 24ms/step - loss: 0.3692 - accuracy: 0.8698 - val_loss: 0.4131 - val_accuracy: 0.8554
<tensorflow.python.keras.callbacks.History at 0x7fc668ae2cc0>
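
Building on the image logging shown earlier, you could also log a few test predictions for a quick visual check. A rough sketch, assuming the trained model, X_test, and labels from above:

import numpy as np

# Log the first few test images together with their predicted class names
preds = np.argmax(model.predict(X_test[:8]), axis=1)
wandb.log({"predictions": [wandb.Image(x.squeeze(), caption=labels[p])
                           for x, p in zip(X_test[:8], preds)]})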

Sweeps

# Run an agent from the command line for an existing sweep
!wandb agent sharan19/visualize-models/q3e69h7k

# Define a sweep configuration
sweep_config = {
  "name": "My Sweep",
  "method": "grid",
  "parameters": {
        "parameter1": {
            "values": [1, 2, 3]
        }
    }
}

sweep_id = wandb.sweep(sweep_config)
Create sweep with ID: cpttnyj5
Sweep URL: https://wandb.ai/sharan19/uncategorized/sweeps/cpttnyj5
# Launch an agent; `train` is your own training function (see the sketch below)
wandb.agent(sweep_id, function=train)
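
The train argument refers to a training function you define yourself; each agent invocation starts a new run whose wandb.config is filled in by the sweep. A minimal sketch matching the config above (the metric and "training" logic are placeholders):

def train():
    # The sweep populates wandb.config with the sampled parameter values
    wandb.init()
    value = wandb.config.parameter1
    # Placeholder training step: log a metric that depends on the swept parameter
    wandb.log({"score": value * 2})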