%% Problem Sheet 6
% Stochastic Simulation
close all; clear all; clc;

path = 'D:/Dropbox/Büro/Teaching/Stochastic Simulation of Processes, Fields and Structures/02 SoSe 2015 Tim Brereton/Problem Sheets/Problem Sheet 06/';
% path = 'C:/Users/Stochastik/Lisa/StoSim-Übung/';

%% Exercise 3
% Gibbs sampler for the hard-core model on an n-by-n grid:
% a site may carry a 1 only if none of its four neighbours does.

% sample size and parameters
N = 10^5;        % number of Gibbs updates
n = 5;           % side length of the grid
lambda = 0.5;    % activity parameter, try (0.5, 1.5)

n_of_ones = zeros(N,1);   % number of occupied sites after each update
X = zeros(n,n);           % initial state: all sites empty

for steps = 1:N
    % pick a site uniformly at random
    i = ceil(n*rand());
    j = ceil(n*rand());

    % is any neighbour inside the grid occupied?
    blocked = false;
    if i < n, blocked = blocked || (X(i+1,j) == 1); end
    if i > 1, blocked = blocked || (X(i-1,j) == 1); end
    if j < n, blocked = blocked || (X(i,j+1) == 1); end
    if j > 1, blocked = blocked || (X(i,j-1) == 1); end

    if blocked
        % an occupied neighbour forces this site to be empty
        X(i,j) = 0;
    else
        % otherwise occupy it with probability lambda/(lambda+1)
        % (the conditional distribution derived in Exercise 2)
        X(i,j) = (rand() <= lambda/(lambda+1));
    end

    n_of_ones(steps) = sum(sum(X));
end

X
fprintf('The estimated mean number of ones is %f\n', mean(n_of_ones));

%% Exercise 4
% Gibbs sampler for an M-state Potts-type model on an m-by-m torus.
close all; clc;

% sample size and parameters
J = 3;      % coupling strength
M = 5;      % number of states per site
m = 10;     % side length of the grid
T = 1;      % (increase the temperature for a better mixed distribution)
N = 10^6;   % number of Gibbs updates

sums = zeros(N,1);
X = ceil(rand(m,m)*M);   % initial state: independent uniform RVs

for steps = 1:N
    % pick a site uniformly at random
    i = ceil(m*rand);
    j = ceil(m*rand);

    % states of the four neighbours (periodic boundary conditions)
    nbrs = [X(mod(i-2,m)+1,j), X(mod(i,m)+1,j), ...
            X(i,mod(j-2,m)+1), X(i,mod(j,m)+1)];

    % conditional weights e^(-(local energy)/T): for each candidate state k,
    % the energy contribution is -J times the number of matching neighbours
    cond_prob = exp(J*arrayfun(@(k) sum(nbrs == k), 1:M)/T);
    cond_prob = cond_prob/sum(cond_prob);   % normalization

    % inverse-transform draw from the conditional distribution
    X(i,j) = find( rand() < cumsum(cond_prob), 1 );

    sums(steps) = sum(sum(X));
end

hm = HeatMap(X, 'Colormap', 'autumn');
fprintf('The estimated expected value: %f\n', mean(sums));

%% Exercise 6 (simulated annealing)
% Knapsack problem: item weights and values are read from hiking.txt.
close all; clc;

data = dlmread([path, 'hiking.txt']);
weights = data(:,1);
values = data(:,2);
n = size(data,1);

% item names (just for output)
names = {'apple', 'banana', 'beer', 'book', 'camera', 'cheese', ...
         'compass', 'glucose', 'map', 'note-case', 'sandwich', ...
         'socks', 'sunglasses', 'suntan cream', 't-shirt', 'tin', ...
         'towel', 'trousers', 'umbrella', 'water', ...
         'waterproof overclothes', 'waterproof trousers'};

% parameters and sample size
w_max = 400;     % knapsack weight limit
N = 10^4;        % number of annealing iterations
beta = 0.999;    % geometric cooling factor

% initial state (empty knapsack) and temperature
X = [];
T = 100;
% cost = negative total value, so minimizing cost maximizes the packed value
cost_function = @(x) -sum(values(x));

totalValues = zeros(N,1);
totalWeights = zeros(N,1);

for i = 1:N
    % propose: add a random item that is not yet packed
    left = setdiff(1:n, X);
    if isempty(left)
        % every item is already packed; nothing can be added this step
        % (without this guard, left(0) would raise an indexing error)
        Y = X;
    else
        index = ceil(length(left)*rand());
        Y = [X, left(index)];
    end

    % repair: remove random items until the proposal is under the weight limit
    while sum(weights(Y)) > w_max
        index = ceil(length(Y)*rand());
        Y(index) = [];
    end

    % Metropolis acceptance: always accept improvements, accept
    % deteriorations with probability exp(-delta_cost/T)
    alpha = exp(-(cost_function(Y)-cost_function(X))/T);
    if rand() <= alpha
        X = Y;
    end

    % lower the temperature
    T = beta*T;

    totalValues(i) = sum(values(X));
    totalWeights(i) = sum(weights(X));
end

fprintf('The final total value is %.0f\n', sum(values(X)));
fprintf('The final total weight is %.0f\n\n', sum(weights(X)));

list = names(sort(X));
fprintf('List of things to take with you:\n');
for i = 1:length(list)
    fprintf('%s\n', list{i});
end

% plot the trajectories of total value and total weight
figure(1);
plot(totalValues); hold on;
plot(totalWeights, 'red'); hold off;
title('Total weight and value per iteration');
legend('values', 'weights', 'Location', 'East')
xlabel('iteration')

% % brute force (to check if the solution is correct)
% mincost = Inf;
% solution = [];
% for i=1:n
%     combinations = combnk(1:n, i);
%     for j=1:size(combinations,1)
%         if (cost_function(combinations(j,:)) < mincost) ...
%                 && (sum(weights(combinations(j,:))) <= w_max)
%             mincost = cost_function(combinations(j,:));
%             solution = combinations(j,:);
%         end
%     end
% end
%
% names(solution)
%
% sum(values(solution))
% sum(weights(solution))
%
% isequal(sort(X), solution)

%% Exercise 7
% Gibbs sampler for the five edge lengths of a bridge network,
% conditioned on every source-sink path having length at least gamma;
% each edge length follows a (truncated) Poisson distribution.
close all; clear all;

N = 10^4;     % sample size
gamma = 4;    % minimum length of shortest path
lambda = 2;   % Parameter of Poisson distribution

% initialization
path_lengths = zeros(N,1);
X = 4*ones(1, 5);   % feasible starting configuration

for step = 1:N
    % choose the edge to resample
    idx = ceil(5*rand());

    % lower bound that keeps every source-sink path at length >= gamma;
    % each edge lies on two paths of the bridge topology
    switch idx
        case 1
            lb = max( gamma-X(2), gamma-X(3)-X(5) );
        case 2
            lb = max( gamma-X(1), gamma-X(3)-X(4) );
        case 3
            lb = max( gamma-X(1)-X(5), gamma-X(2)-X(4) );
        case 4
            lb = max( gamma-X(5), gamma-X(2)-X(3) );
        otherwise
            lb = max( gamma-X(4), gamma-X(1)-X(3) );
    end

    % rejection sampling from the Poisson distribution truncated to (lb, inf)
    draw = poissrnd(lambda);
    while draw <= lb
        draw = poissrnd(lambda);
    end
    X(idx) = draw;

    % record the current shortest source-sink path length
    path_lengths(step) = min([X(1)+X(2), X(4)+X(5), ...
                              X(1)+X(3)+X(5), X(4)+X(3)+X(2)]);
end

fprintf('The estimated mean length of the shortest path is %f\n', mean(path_lengths));