Commit 661f123f authored by Jayant Khatkar's avatar Jayant Khatkar

updated docs for new interface

parent 5824ee43
......@@ -13,6 +13,25 @@ pip install git+
from DecMCTS import Tree
from copy import deepcopy
# Object stored at each node
class State:
    """Per-node search state: the actions taken so far and their running total."""

    def __init__(self, act_seq, cum_sum):
        # List of actions taken from the starting position to reach this node.
        self.action_seq = act_seq
        # Running total of the action values along that sequence.
        self.cumulative_sum = cum_sum
# This calculates the object stored at a given node given parent node and action
def state_storer(data, parent_state, action):
    """Compute the object stored at a node, given the parent's state and an action.

    data: problem data passed through by the tree (unused in this example)
    parent_state: State of the parent node, or None for the root node
    action: the action (a number in this example) taken from the parent

    Returns a new State; the parent's state is never mutated.
    """
    # Root-node edge case: no parent, start from an empty state.
    # This state is also used as the Null action when calculating local reward.
    if parent_state is None:  # PEP 8: compare to None with `is`, not `==`
        return State([], 0)
    # Deepcopy so edits here do not mutate the parent node's state.
    state = deepcopy(parent_state)
    state.cumulative_sum = state.cumulative_sum + action
    # NOTE(review): `state.action_seq` is not extended with `action` here —
    # confirm whether the action sequence is meant to be recorded as well.
    return state
# data can be anything required to calculate your
# global reward and available actions
......@@ -22,7 +41,7 @@ data = {}
# Create an available actions function
# This returns a list of possible actions to take from a given state
# state and robot_id inputs explained below
def avail_actions(data, state, robot_id):
def avail_actions(data, states, robot_id):
# This example is simply getting max sum,
# options are same regardless of state
......@@ -32,8 +51,8 @@ def avail_actions(data, state, robot_id):
# actions taken by the current robot, and every other robot
# State is a dictionary with keys being robot IDs, and values
# are a list of actions taken from the starting position
def reward(data, state):
each_robot_sum = [sum(state[a]) for a in state]
def reward(dat, states):
    """Global reward: the total of every robot's cumulative sum.

    dat: problem data (unused in this example)
    states: dict mapping robot id -> State
    """
    return sum(robot_state.cumulative_sum for robot_state in states.values())
# Number of Action Sequences to communicate
......@@ -2,6 +2,6 @@ from distutils.core import setup
......@@ -23,7 +23,6 @@ def avail_actions(data, states, robot_id):
return [1,2,3,4,5]
def reward(dat, states):
    """Global reward: the total of every robot's cumulative sum.

    dat: problem data (unused in this example)
    states: dict mapping robot id -> State
    """
    # Removed stray `print states`: it is Python 2 statement syntax (a
    # SyntaxError under Python 3), and a reward function should have no
    # side effects.
    each_robot_sum = [states[robot].cumulative_sum for robot in states]
    return sum(each_robot_sum)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment