I've tried many times to fix this, and I've also used the example code from functional.py, but I keep getting the same error at the loss computation. How can I fix this?
My libraries:
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
import numpy as np
import matplotlib
import pandas as pd
from torch.autograd import Variable
from torch.utils.data import DataLoader,TensorDataset
from sklearn.model_selection import train_test_split
import warnings
import os
import torchvision
import torchvision.datasets as dsets
import torchvision.transforms as transforms
train=pd.read_csv("train.csv",dtype=np.float32)
targets_numpy = train.label.values
features_numpy = train.loc[:,train.columns != "label"].values/255 # normalization
features_train, features_test, targets_train, targets_test = train_test_split(features_numpy, targets_numpy,test_size = 0.2, random_state = 42)
featuresTrain=torch.from_numpy(features_train)
targetsTrain=torch.from_numpy(targets_train)
featuresTest=torch.from_numpy(features_test)
targetsTest=torch.from_numpy(targets_test)
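# note: because the whole CSV was read with dtype=np.float32, targetsTrain and
# targetsTest are float tensors here (this matters for the loss function later)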
batch_size=100
n_iterations=10000
num_epochs=n_iterations/(len(features_train)/batch_size)
num_epochs=int(num_epochs)
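# e.g., assuming the 42000-row Kaggle digit CSV: len(features_train) = 33600,
# so num_epochs = int(10000 / (33600 / 100)) = int(29.76...) = 29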
train=torch.utils.data.TensorDataset(featuresTrain,targetsTrain)
test=torch.utils.data.TensorDataset(featuresTest,targetsTest)
print(type(train))
train_loader=DataLoader(train,batch_size=batch_size,shuffle=False)
test_loader=DataLoader(test,batch_size=batch_size,shuffle=False)
print(type(train_loader))
plt.imshow(features_numpy[226].reshape(28,28))
plt.axis("off")
plt.title(str(targets_numpy[226]))
plt.show()
class ANNModel(nn.Module):
    def __init__(self, input_dim, hidden_dim, output_dim):
        super(ANNModel, self).__init__()
        self.fc1 = nn.Linear(input_dim, hidden_dim)
        self.relu1 = nn.ReLU()
        self.fc2 = nn.Linear(hidden_dim, hidden_dim)
        self.tanh2 = nn.Tanh()
        self.fc4 = nn.Linear(hidden_dim, output_dim)

    def forward(self, x):  # the layers defined above are connected in forward
        out = self.fc1(x)
        out = self.relu1(out)
        out = self.fc2(out)
        out = self.tanh2(out)
        out = self.fc4(out)
        return out
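# the forward pass returns raw logits (no softmax); nn.CrossEntropyLoss below
# applies log-softmax internally, so logits are exactly what it expects as input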
input_dim=28*28
hidden_dim=150
output_dim=10
model=ANNModel(input_dim,hidden_dim,output_dim)
error=nn.CrossEntropyLoss()
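# nn.CrossEntropyLoss expects float logits of shape (batch, 10) as input and
# integer class indices (torch.long) of shape (batch,) as targets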
learning_rate=0.02
optimizer=torch.optim.SGD(model.parameters(),lr=learning_rate)
count=0
loss_list=[]
iteration_list=[]
accuracy_list = []
for epoch in range(num_epochs):
    for i, (images, labels) in enumerate(train_loader):
        train = Variable(images.view(-1, 28*28))
        labels = Variable(labels)
        #print(labels)
        #print(outputs)
        optimizer.zero_grad()
        # forward propagation
        outputs = model(train)
        #outputs = torch.randn(784, 10, requires_grad=True)
        ##labels = torch.randn(784, 10).softmax(dim=1)
        loss = error(outputs, labels)
        loss.backward()
        optimizer.step()
        count += 1
        if count % 50 == 0:
            correct = 0
            total = 0
            for images, labels in test_loader:
                test = Variable(images.view(-1, 28*28))
                outputs = model(test)
                predicted = torch.max(outputs.data, 1)[1]  # torch.max returns (values, indices); [1] is the predicted class
                total += len(labels)
                correct += (predicted == labels).sum()
            accuracy = 100 * correct / float(total)
            loss_list.append(loss.data)
            iteration_list.append(count)
            accuracy_list.append(accuracy)
            if count % 500 == 0:
                print('Iteration: {} Loss: {} Accuracy: {} %'.format(count, loss.data, accuracy))
Error:
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-9-9e53988ad250> in <module>()
26 #outputs=torch.randn(784,10,requires_grad=True)
27 ##labels=torch.randn(784,10).softmax(dim=1)
---> 28 loss=error(outputs,labels)
29
30
2 frames
/usr/local/lib/python3.7/dist-packages/torch/nn/functional.py in cross_entropy(input, target, weight, size_average, ignore_index, reduce, reduction, label_smoothing)
2844 if size_average is not None or reduce is not None:
2845 reduction = _Reduction.legacy_get_string(size_average, reduce)
-> 2846 return torch._C._nn.cross_entropy_loss(input, target, weight, _Reduction.get_enum(reduction), ignore_index, label_smoothing)
2847
2848
RuntimeError: expected scalar type Long but found Float
Answer: use a softmax function, since you are interested in the probabilities of any sample belonging to 10 mutually exclusive classes: set self.softmax = nn.Softmax(dim=1), apply it after your final output in the forward function (out = self.softmax(out)), and return this value.
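That said, the RuntimeError above complains about the target dtype, not a missing softmax: cross_entropy expects integer class indices (torch.long) as targets, but the labels here are float32 because the entire CSV was read with dtype=np.float32. A minimal sketch of a fix, reusing the variable names from the question:

targetsTrain = torch.from_numpy(targets_train).type(torch.LongTensor)  # cast labels to int64 (Long)
targetsTest = torch.from_numpy(targets_test).type(torch.LongTensor)
train = TensorDataset(featuresTrain, targetsTrain)  # rebuild the datasets with Long targets
test = TensorDataset(featuresTest, targetsTest)

Also note that nn.CrossEntropyLoss already combines LogSoftmax and NLLLoss, so the model should keep returning raw logits; adding nn.Softmax(dim=1) in forward would make the loss normalize the outputs twice.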