r:neural_network
This is an old revision of the document!
Neural Network
> install.packages("nnet")
> library(nnet)
> m <- nnet(Species ~ ., data=iris, size=3)
# weights: 27
initial value 191.494035
iter 10 value 65.618496
iter 20 value 40.493306
iter 30 value 8.542349
iter 40 value 6.034377
iter 50 value 6.000246
iter 60 value 5.998411
iter 70 value 5.983894
iter 80 value 5.972932
iter 90 value 5.968740
iter 100 value 5.965371
final value 5.965371
stopped after 100 iterations
> round(predict(m, newdata=iris),2)
setosa versicolor virginica
1 1 0.00 0.00
2 1 0.00 0.00
3 1 0.00 0.00
4 1 0.00 0.00
5 1 0.00 0.00
6 1 0.00 0.00
7 1 0.00 0.00
8 1 0.00 0.00
9 1 0.00 0.00
10 1 0.00 0.00
11 1 0.00 0.00
12 1 0.00 0.00
13 1 0.00 0.00
14 1 0.00 0.00
15 1 0.00 0.00
16 1 0.00 0.00
17 1 0.00 0.00
18 1 0.00 0.00
19 1 0.00 0.00
20 1 0.00 0.00
21 1 0.00 0.00
22 1 0.00 0.00
23 1 0.00 0.00
24 1 0.00 0.00
25 1 0.00 0.00
26 1 0.00 0.00
27 1 0.00 0.00
28 1 0.00 0.00
29 1 0.00 0.00
30 1 0.00 0.00
31 1 0.00 0.00
32 1 0.00 0.00
33 1 0.00 0.00
34 1 0.00 0.00
35 1 0.00 0.00
36 1 0.00 0.00
37 1 0.00 0.00
38 1 0.00 0.00
39 1 0.00 0.00
40 1 0.00 0.00
41 1 0.00 0.00
42 1 0.00 0.00
43 1 0.00 0.00
44 1 0.00 0.00
45 1 0.00 0.00
46 1 0.00 0.00
47 1 0.00 0.00
48 1 0.00 0.00
49 1 0.00 0.00
50 1 0.00 0.00
51 0 1.00 0.00
52 0 1.00 0.00
53 0 1.00 0.00
54 0 1.00 0.00
55 0 1.00 0.00
56 0 1.00 0.00
57 0 1.00 0.00
58 0 1.00 0.00
59 0 1.00 0.00
60 0 1.00 0.00
61 0 1.00 0.00
62 0 1.00 0.00
63 0 1.00 0.00
64 0 1.00 0.00
65 0 1.00 0.00
66 0 1.00 0.00
67 0 1.00 0.00
68 0 1.00 0.00
69 0 0.95 0.05
70 0 1.00 0.00
71 0 0.59 0.41
72 0 1.00 0.00
73 0 0.77 0.23
74 0 1.00 0.00
75 0 1.00 0.00
76 0 1.00 0.00
77 0 1.00 0.00
78 0 0.73 0.27
79 0 1.00 0.00
80 0 1.00 0.00
81 0 1.00 0.00
82 0 1.00 0.00
83 0 1.00 0.00
84 0 0.12 0.88
85 0 1.00 0.00
86 0 1.00 0.00
87 0 1.00 0.00
88 0 1.00 0.00
89 0 1.00 0.00
90 0 1.00 0.00
91 0 1.00 0.00
92 0 1.00 0.00
93 0 1.00 0.00
94 0 1.00 0.00
95 0 1.00 0.00
96 0 1.00 0.00
97 0 1.00 0.00
98 0 1.00 0.00
99 0 1.00 0.00
100 0 1.00 0.00
101 0 0.00 1.00
102 0 0.00 1.00
103 0 0.00 1.00
104 0 0.00 1.00
105 0 0.00 1.00
106 0 0.00 1.00
107 0 0.11 0.89
108 0 0.00 1.00
109 0 0.00 1.00
110 0 0.00 1.00
111 0 0.01 0.99
112 0 0.00 1.00
113 0 0.00 1.00
114 0 0.00 1.00
115 0 0.00 1.00
116 0 0.00 1.00
117 0 0.00 1.00
118 0 0.00 1.00
119 0 0.00 1.00
120 0 0.08 0.92
121 0 0.00 1.00
122 0 0.00 1.00
123 0 0.00 1.00
124 0 0.06 0.94
125 0 0.00 1.00
126 0 0.00 1.00
127 0 0.19 0.81
128 0 0.20 0.80
129 0 0.00 1.00
130 0 0.03 0.97
131 0 0.00 1.00
132 0 0.00 1.00
133 0 0.00 1.00
134 0 0.77 0.23
135 0 0.03 0.97
136 0 0.00 1.00
137 0 0.00 1.00
138 0 0.00 1.00
139 0 0.33 0.67
140 0 0.00 1.00
141 0 0.00 1.00
142 0 0.00 1.00
143 0 0.00 1.00
144 0 0.00 1.00
145 0 0.00 1.00
146 0 0.00 1.00
147 0 0.00 1.00
148 0 0.00 1.00
149 0 0.00 1.00
150 0 0.02 0.98
> predict(m, newdata=iris, type="class")
[1] "setosa" "setosa" "setosa" "setosa"
[5] "setosa" "setosa" "setosa" "setosa"
[9] "setosa" "setosa" "setosa" "setosa"
[13] "setosa" "setosa" "setosa" "setosa"
[17] "setosa" "setosa" "setosa" "setosa"
[21] "setosa" "setosa" "setosa" "setosa"
[25] "setosa" "setosa" "setosa" "setosa"
[29] "setosa" "setosa" "setosa" "setosa"
[33] "setosa" "setosa" "setosa" "setosa"
[37] "setosa" "setosa" "setosa" "setosa"
[41] "setosa" "setosa" "setosa" "setosa"
[45] "setosa" "setosa" "setosa" "setosa"
[49] "setosa" "setosa" "versicolor" "versicolor"
[53] "versicolor" "versicolor" "versicolor" "versicolor"
[57] "versicolor" "versicolor" "versicolor" "versicolor"
[61] "versicolor" "versicolor" "versicolor" "versicolor"
[65] "versicolor" "versicolor" "versicolor" "versicolor"
[69] "versicolor" "versicolor" "versicolor" "versicolor"
[73] "versicolor" "versicolor" "versicolor" "versicolor"
[77] "versicolor" "versicolor" "versicolor" "versicolor"
[81] "versicolor" "versicolor" "versicolor" "virginica"
[85] "versicolor" "versicolor" "versicolor" "versicolor"
[89] "versicolor" "versicolor" "versicolor" "versicolor"
[93] "versicolor" "versicolor" "versicolor" "versicolor"
[97] "versicolor" "versicolor" "versicolor" "versicolor"
[101] "virginica" "virginica" "virginica" "virginica"
[105] "virginica" "virginica" "virginica" "virginica"
[109] "virginica" "virginica" "virginica" "virginica"
[113] "virginica" "virginica" "virginica" "virginica"
[117] "virginica" "virginica" "virginica" "virginica"
[121] "virginica" "virginica" "virginica" "virginica"
[125] "virginica" "virginica" "virginica" "virginica"
[129] "virginica" "virginica" "virginica" "virginica"
[133] "virginica" "versicolor" "virginica" "virginica"
[137] "virginica" "virginica" "virginica" "virginica"
[141] "virginica" "virginica" "virginica" "virginica"
[145] "virginica" "virginica" "virginica" "virginica"
[149] "virginica" "virginica"
>
Example 2: classifying wine types
# Example 2: classifying wine types with a single-hidden-layer neural net.

# Load the wine data; column 1 is assumed to be the class label (Type) —
# TODO confirm against wine.csv.
wine <- read.csv("wine.csv")
head(wine)
summary(wine)

# Standardize the predictors (mean 0, sd 1); keep the label column unscaled.
wine.scale <- cbind(wine[1], scale(wine[-1]))
summary(wine.scale)
apply(wine.scale[-1], 2, sd)  # every predictor sd should now be 1

# Partition the data into 70% training / 30% test rows.
data.size <- nrow(wine.scale)
set.seed(1111)  # reproducible split
samp <- sample(seq_len(data.size), data.size * 0.7)
data.tr <- wine.scale[samp, ]
data.test <- wine.scale[-samp, ]
summary(data.tr)
summary(data.test)

# Fit the neural network on the training data:
# 2 hidden units, small weight decay for regularization, up to 200 iterations.
library(nnet)
model.nnet <- nnet(Type ~ ., data = data.tr, size = 2, decay = 5e-04,
                   maxit = 200)
names(model.nnet)

# Create the confusion matrix for the model on the held-out test data.
predicted <- predict(model.nnet, data.test, type = "class")
predicted
actual <- data.test$Type
model.confusion.matrix <- table(actual, predicted)
model.confusion.matrix

# Express each confusion-matrix cell as a percentage of all test cases.
confusion.matrix.rate <- prop.table(model.confusion.matrix) * 100
round(confusion.matrix.rate, digits = 2)

# Overall error rate (%): total minus the correctly-classified diagonal.
# Index the diagonal from the table's own dimension instead of hard-coding 3.
n.class <- nrow(model.confusion.matrix)
diag.index <- cbind(seq_len(n.class), seq_len(n.class))
error.overall <- sum(confusion.matrix.rate) - sum(confusion.matrix.rate[diag.index])
paste("오차율 =", round(error.overall, digits = 2), "%")
r/neural_network.1481672713.txt.gz · Last modified: by hkimscil
