Commit 506b1f2

added backendfiles
1 parent f6641ea commit 506b1f2

1,406 files changed: +237,922 -0 lines changed
Lines changed: 12 additions & 0 deletions
@@ -0,0 +1,12 @@
FROM ubuntu:18.04

RUN apt update && \
    apt install -y octave python3 python3-pip

RUN octave --eval 'pkg install -forge nnet'

COPY . /

RUN pip3 install -r requirements.txt

CMD python3 server.py
Lines changed: 117 additions & 0 deletions
@@ -0,0 +1,117 @@
function [] = clnnMappingCore(iterationStepSize)
global filename;

%X: Matrix of input vectors (N X d):
%   Each input vector is a row vector.
%N: Number of input vectors.
%d: Dimension of each input vector.

%gridLength: Number of neurons per side (assuming a square grid of neurons).
%numIterations: Total number of iterations / time-steps

% SOM/Kohonen map K neurons arranged in a square LxL layout (K = L^2)
% mapping a square

load(strcat(filename, ".dat"));
maxIterationIndex = min(currentIterationIndex+iterationStepSize-1, numIterations);
currentIterationIndex
maxIterationIndex

% For each iteration
for i=currentIterationIndex:maxIterationIndex
  ri=[];
  disp('Iteration number:')
  disp(i)

  rnd = randperm(N);

  % For N input vectors.
  for p=1:N
    j=rnd(p);

    % Compute distance.
    dist = (W - repmat(X(j,:),K,1)).^2;
    tsumdist = sum(dist,2);

    % Find winning neuron.
    [mindist, ind] = min(tsumdist);

    % 2D index of winning neuron.
    ri(j,1) = I(ind);
    ri(j,2) = J(ind);


    % Distance of other neurons from the winning neuron.
    dist = 1/(sqrt(2*piConst)*sigT) .* exp(sum(([I(:), J(:)] - repmat([ri(j,1), ri(j,2)], K, 1)).^2, 2)/(-2*sigT)) * etaT;


    W = W + repmat(dist(:),1,d).*(repmat(X(j,:),K,1) - W);

    % Clear temp variables.
    clear j dist tsumdist mindist ind;

  end

  clear ri rnd;
  % Update neighbourhood function.
  sigT = sig0*exp(-i/tau1);

  % Update the learning rate.
  etaT = eta0*exp(-i/tau2);
end

currentIterationIndex = i+1;
clear iterationStepSize;

% Connecting the adjacent nodes in the 2D map and plotting.
%figure;
axes('FontSize', 25);

subplot(1,2,1);
plot(X(:,1), X(:,2), 'ko', "markersize", 15);
xlabel('x-coordinate of data');
ylabel('y-coordinate of data');
title('Data distribution');
xlim([-0.1 1.1]);
ylim([-0.1 1.1]);
axis 'equal'

subplot(1,2,2);
for k=1:K
  wdist = indx - repmat(indx(k,:),K,1);
  wdistSqr = sum(wdist.*wdist, 2);
  neighbourIndex = find(wdistSqr == 1);

  %k
  %neighbourIndex
  %pause
  numNeighbours = length(neighbourIndex);

  for kk=1:numNeighbours
    wtemp(1,:) = W(k,:);
    ind = neighbourIndex(kk);
    wtemp(2,:) = W(ind,:);
    plot(wtemp(:,1), wtemp(:,2), 'k');
    hold on;
    clear wtemp ind;
  end
end

clear k wdist wdistSqr neighbourIndex numNeighbours kk wtemp ind;
k=currentIterationIndex-1;
plot(W(:,1), W(:,2), 'r*', "markersize", 15);
hold off;
xlabel('x-coordinate of weight vector');
ylabel('y-coordinate of weight vector');
title(['SOM after iteration #' num2str(k)]);
xlim([-0.1 1.1]);
ylim([-0.1 1.1]);
axis 'equal'


% Save figure;
print(strcat(filename, ".png"));

% Save state of variables.
save(strcat(filename, ".dat"));
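Note for reviewers: the inner loop of clnnMappingCore is the usual Kohonen/SOM update with a Gaussian-shaped neighbourhood whose width and learning rate decay exponentially. Written out exactly as the code computes it (the exponent divides by 2*sigT rather than the textbook 2*sigT^2, and piConst is assumed to be restored from the loaded .dat state), the weight applied to neuron j when neuron r* wins on input x is:

    h_j(t) = \frac{\eta(t)}{\sqrt{2\pi}\,\sigma(t)}
             \exp\!\left( -\frac{\lVert r_j - r^{*} \rVert^{2}}{2\,\sigma(t)} \right),
    \qquad
    w_j \leftarrow w_j + h_j(t)\,(x - w_j),
    \qquad
    \sigma(t) = \sigma_0 e^{-t/\tau_1}, \quad \eta(t) = \eta_0 e^{-t/\tau_2}

with r* the grid position of the winner argmin_k ||w_k - x||^2, and sigma_0 = sig0, eta_0 = eta0, tau_1, tau_2 as initialised in clnn_mapping_2D_2D below. clnnMappingCore itself takes only iterationStepSize and appears to resume from state previously saved in <filename>.dat (via the global filename), re-saving the variables and a PNG snapshot after each chunk of iterations.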
Lines changed: 143 additions & 0 deletions
@@ -0,0 +1,143 @@
function [] = clnn_mapping_2D_2D(X, gridLength, numIterations)

%X: Matrix of input vectors (N X d):
%   Each input vector is a row vector.
%N: Number of input vectors.
%d: Dimension of each input vector.

%gridLength: Number of neurons per side (assuming a square grid of neurons).
%numIterations: Total number of iterations / time-steps


% SOM/Kohonen map K neurons arranged in a square LxL layout (K = L^2)
% mapping a square


[N,d] = size(X);
X = X/max(max(abs(X)));
L = gridLength;
K = L^2;
T = numIterations;


figure;
plot(X(:,1), X(:,2), 'k*');
pause


%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Variables and setup

pi = 3.1416;

% Initialize weights
% Each weight vector is a row vector.
% Weight index is a number, which can be converted into (i,j) form.
W = rand(K,d);

% Neuron index in (i,j) form.
[I,J] = ind2sub([L, L], 1:K);

indx(:,1) = I(:);
indx(:,2) = J(:);


% Number of neighbourhood neurons which need to be updated.
sig0 = floor(K/5);

% This number should be updated after every iteration/epoch.
sigT = sig0;

% Constant for updating sigT.
tau1 = T;

% Learning rate.
eta0 = 0.1;
etaT = eta0;

% Constant for updating etaT.
tau2 = 2*T;

% dT: Number of iterations/time-steps for which plot needs to be shown once.
% For example, if dT=10, then plot is generated once in every 10 iterations.
dT = 20;


% For each iteration
for i=1:T
  ri=[];
  disp('Iteration number:')
  disp(i)

  rnd = randperm(N);

  % For N input vectors.
  for p=1:N
    j=rnd(p);

    % Compute distance.
    dist = (W - repmat(X(j,:),K,1)).^2;
    tsumdist = sum(dist,2);

    % Find winning neuron.
    [mindist, ind] = min(tsumdist);

    % 2D index of winning neuron.
    ri(j,1) = I(ind);
    ri(j,2) = J(ind);


    % Distance of other neurons from the winning neuron.
    dist = 1/(sqrt(2*pi)*sigT) .* exp(sum(([I(:), J(:)] - repmat([ri(j,1), ri(j,2)], K, 1)).^2, 2)/(-2*sigT)) * etaT;


    W = W + repmat(dist(:),1,d).*(repmat(X(j,:),K,1) - W);

    % Plotting weights

  end

  % Update neighbourhood function.
  sigT = sig0*exp(-i/tau1);

  % Update the learning rate.
  etaT = eta0*exp(-i/tau2);

  if mod(i,dT) == dT-1
    for k=1:K
      wdist = indx - repmat(indx(k,:),K,1);
      wdistSqr = sum(wdist.*wdist, 2);
      neighbourIndex = find(wdistSqr == 1);

      %k
      %neighbourIndex
      %pause
      numNeighbours = length(neighbourIndex);

      for kk=1:numNeighbours
        wtemp(1,:) = W(k,:);
        ind = neighbourIndex(kk);
        wtemp(2,:) = W(ind,:);
        plot(wtemp(:,1), wtemp(:,2), 'k');
        hold on;
        clear wtemp ind;
      end
    end
    plot(W(:,1), W(:,2), 'r*');
    hold off;
    pause
  end

  % Plotting weights
  %if mod(i,dT) == dT-1
  %  labels = num2str(C(:));
  %  text(ri(:,1), ri(:,2), labels);
  %  pause
  %end
end
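A minimal sketch of how clnn_mapping_2D_2D might be exercised from the Octave prompt; the data, grid length, and iteration count below are illustrative assumptions, not values taken from this commit:

    % Hypothetical smoke test (illustrative values, not part of this commit).
    X = rand(400, 2);               % 400 random 2-D points; each row is an input vector
    clnn_mapping_2D_2D(X, 8, 100);  % 8x8 grid (K = 64 neurons), 100 iterations
    % The script calls pause after the initial scatter plot and after each map
    % snapshot (every dT = 20 iterations), so press a key to step through.

The first pause comes from the plot of the raw data at the top of the function; the periodic snapshots come from the mod(i,dT) block inside the training loop.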
