MinimaxAI_DC.java

import java.util.*;

/**
 * This class provides methods for the minimax AI. It has a minimax depth that can be changed,
 * and it provides the public method getAction() for returning an optimal action given a gameState.
 * Mutator and accessor methods are provided for the AI's current location.
 *
 * @author David Chen
 * @version Java 1.8.0 - 3/17/20
 */
public class MinimaxAI_DC {
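    /*
     * Example usage (a minimal sketch; the GameState_DC constructor shown below is
     * hypothetical, since only its methods are visible from this file):
     *
     *   GameState_DC state = new GameState_DC(boardSize); // hypothetical constructor
     *   MinimaxAI_DC ai = new MinimaxAI_DC(boardSize, 0, 0, 4);
     *   int[] action = ai.getAction(state); // presumably {row, column} of the chosen move
     */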
    private int boardSize;
    private int currR, currC;
    private String playerState;
    private int minimaxDepth;
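    // constructs an AI for a boardSize x boardSize board, starting at
    // (initR, initC), that searches to the given minimax depth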
    public MinimaxAI_DC(int boardSize, int initR, int initC, int minimaxDepth) {
        this.boardSize = boardSize;
        currR = initR;
        currC = initC;
        this.minimaxDepth = minimaxDepth;
    }
    // copy constructor for making a copy of the object
    public MinimaxAI_DC(MinimaxAI_DC player) {
        this.boardSize = player.boardSize;
        this.currR = player.currR;
        this.currC = player.currC;
        this.minimaxDepth = player.minimaxDepth;
    }
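    // scores every legal action with depth-limited minimax and returns one of
    // the lowest-scoring actions (this AI plays as the minimizing player),
    // chosen at random so that equally good moves are not always played in the same order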
    public int[] getAction(GameState_DC gameState) {
        ArrayList<int[]> bestActions = new ArrayList<int[]>();
        double bestVal = Double.POSITIVE_INFINITY;
        ArrayList<int[]> actions = gameState.getPossibleActions();
        orderActions(actions);
        for (int[] action : actions) {
            double val = minimax(gameState.generateSuccessor(action), minimaxDepth, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
            // System.out.println(gameState.getCurrAction() + " " + action[0] + " " + action[1] + " " + val);
            if (val < bestVal) {
                bestVal = val;
                bestActions.clear(); // remove suboptimal actions
                bestActions.add(action);
                if (val < -1000) { // win, no need to check other moves
                    break;
                }
            }
            else if (val == bestVal) {
                bestActions.add(action);
            }
        }
        // pick a random action from all the best actions
        int r = (int) (Math.random() * bestActions.size());
        return bestActions.get(r);
    }
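    // depth-limited minimax with alpha-beta pruning: player 1 maximizes
    // (raises alpha), player 2 minimizes (lowers beta), and the recursion
    // stops when the depth runs out or the game has been decided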
    private double minimax(GameState_DC gameState, int depth, double alpha, double beta) {
        if (depth == 0 || gameState.gameWinner() != 0) { // depth = 0 or game over
            return evaluate(gameState);
        }
        ArrayList<int[]> actions = gameState.getPossibleActions();
        // randomly shuffle the actions to encourage earlier alpha-beta cutoffs
        orderActions(actions);
        int currPlayer = gameState.getCurrPlayer();
        String currAction = gameState.getCurrAction();
        if (currPlayer == 1) {
            for (int[] action : actions) {
                double v = minimax(gameState.generateSuccessor(action), depth - 1, alpha, beta);
                alpha = Math.max(alpha, v);
                if (alpha >= beta) {
                    break;
                }
            }
            return alpha;
        }
        else {
            for (int[] action : actions) {
                double v = minimax(gameState.generateSuccessor(action), depth - 1, alpha, beta);
                beta = Math.min(beta, v);
                if (alpha >= beta) {
                    break;
                }
            }
            return beta;
        }
    }
    // provide an estimate of the score of the current gameState
    // considers mobility and center proximity
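    // win/loss values also include the move number so that quicker wins
    // (and later losses) are preferred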
    private double evaluate(GameState_DC gameState) {
        double p1Pos = gameState.getPossibleMoves(1).size() - centerProximity(gameState, 1);
        double p2Pos = gameState.getPossibleMoves(2).size() - centerProximity(gameState, 2);
        if (gameState.gameWinner() == 1) {
            return 1000000.0 - gameState.getMoveNum() + p1Pos - p2Pos;
        }
        if (gameState.gameWinner() == 2) {
            return -1000000.0 + gameState.getMoveNum() + p1Pos - p2Pos;
        }
        if (gameState.gameWinner() == 3) { // tie
            return 100.0 + p1Pos - p2Pos;
        }
        return p1Pos - p2Pos;
    }
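    // straight-line distance from the given player's position to the center
    // of the board (smaller means closer to the center)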
    private double centerProximity(GameState_DC gameState, int playerNum) {
        double centerX = (boardSize - 1) / 2.;
        double centerY = (boardSize - 1) / 2.;
        double posX, posY;
        if (playerNum == 1) {
            posX = gameState.getP1Pos()[0];
            posY = gameState.getP1Pos()[1];
        }
        else {
            posX = gameState.getP2Pos()[0];
            posY = gameState.getP2Pos()[1];
        }
        // dist to center (arguments ordered as (x1, y1, x2, y2) to match distance())
        return distance(posX, posY, centerX, centerY);
        // alternative: number of moves to reach center
        // return Math.max(Math.abs(posX - centerX), Math.abs(posY - centerY));
    }
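    // Euclidean distance between the points (x1, y1) and (x2, y2)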
    private double distance(double x1, double y1, double x2, double y2) {
        return Math.sqrt(Math.pow(x1 - x2, 2) + Math.pow(y1 - y2, 2));
    }
    // randomize the order of actions to try to get more alpha-beta pruning
    public void orderActions(ArrayList<int[]> actions) {
        Collections.shuffle(actions);
    }
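    // mutators and accessors for the search depth and the AI's current location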
    public void setDepth(int depth) {
        minimaxDepth = depth;
    }
    public int getDepth() {
        return minimaxDepth;
    }
    public void setCurrR(int newR) {
        currR = newR;
    }
    public void setCurrC(int newC) {
        currC = newC;
    }
    public int getCurrR() {
        return currR;
    }
    public int getCurrC() {
        return currC;
    }
}