Analysis of Thermal Conductivity of Alloys at SLAC

This short script loads a data file (to be made available here soon), trains a model, analyzes it, and writes a CSV file containing the full model. For this case study we thank Suchismita Sarker from SLAC. Note: this code works with version 5 of gpCAM.

import numpy as np

from gpcam.gp_optimizer import GPOptimizer

import matplotlib.pyplot as plt


# --- Data loading -------------------------------------------------------
# Each row of the CSV is expected to be [x1, x2, x3, measured_value].
a = np.genfromtxt("name_of_data.csv", delimiter = ",")

######################################################

# The first three columns are the input coordinates; the fourth column
# (kept 2-d via the 3:4 slice) is the scalar measurement.
x = a[:, :3]
y = a[:, 3:4]

# Search domain for new suggestions: the 3-d unit cube.
index_set_bounds = np.array([[0., 1.], [0., 1.], [0., 1.]])

# Bounds for the four kernel hyperparameters optimized during training.
hyperparameter_bounds = np.array([[0.001, 1e9], [0.001, 100], [0.001, 100], [0.001, 100]])

# Known-good hyperparameters from a previous run, kept for reference:
#hps = np.array([7.19526714e+00, 5.86871691e+03, 1.06622557e+03, 8.31750073e+04])

###################################################################################

# Build the optimizer: 3 input dimensions, 1 output dimension, 1 output
# number, over the unit-cube index set with the hyperparameter bounds above.
gp = GPOptimizer(
    3, 1, 1,
    index_set_bounds,
    hyperparameter_bounds,
    gp_kernel_function=None,  # use gpCAM's default kernel
)

# Hand the data to the optimizer and train the hyperparameters with a
# global likelihood optimization (no dask client, runs locally).
gp.tell(
    x, y,
    likelihood_optimization_method = "global",
    init_hyperparameters=None,
    likelihood_optimization_max_iter=120,
    likelihood_optimization_pop_size=100,
    likelihood_optimization_tolerance=0.000001,
    dask_client=False,
)

######################################################

######################################################

######################################################

def _section(title):
    """Print a section heading followed by the separator used throughout."""
    print(title)
    print("=======================")

# Evaluate the trained objective at the center of the unit cube.
_section("evaluating objective function at [0.5,0.5,0.5]")
r = gp.evaluate_objective_function(np.array([[0.5,0.5,0.5]]))
print("result: ",r)
input("Continue with ENTER")

# Inspect the data currently held by the optimizer.
_section("getting data from gp optimizer:")
r = gp.get_data()
print(r)
input("Continue with ENTER")

# Ask for the next suggested measurement point(s).
_section("ask()ing for new suggestions")
r = gp.ask()
print(r)
input("Continue with ENTER")

# Extrema of the model surface (maximum is found by minimizing -f(x)).
_section("getting the maximum (remember that this means getting the minimum of -f(x)):")
r = gp.ask(objective_function = "maximum")
print(r)

_section("getting the minimum:")
r = gp.ask(objective_function = "minimum")
print(r)
input("Continue with ENTER")

print("Writing interpolation to file...")
print("=======================")

# Evaluate the posterior mean of the trained GP on a regular n x n x n grid
# over the unit cube.  Results are kept both as a 3-d cube (ar3d) and as a
# flat (x, y, z, scalar) table (l) suitable for CSV export / ParaView.
# NOTE: the original source had lost the loop indentation; the nesting
# below restores the obviously intended i -> j -> k structure.
n = 50
ar3d = np.empty((n, n, n))      # posterior-mean cube, indexed [i, j, k]
l = np.empty((n * n * n, 4))    # flat rows: x, y, z, scaled value
x = np.linspace(0, 1, n)
y = np.linspace(0, 1, n)
z = np.linspace(0, 1, n)

counter = 0
for i in range(n):
    print("done ", ((i + 1.0) / n) * 100., " percent")
    for j in range(n):
        for k in range(n):
            res = gp.gp.posterior_mean(np.array([[x[i], y[j], z[k]]]))
            mean = res["f(x)"]  # hoisted: used for both the cube and the table
            ar3d[i, j, k] = mean
            l[counter, 0] = x[i]
            l[counter, 1] = y[j]
            l[counter, 2] = z[k]
            # NOTE(review): the exported scalar is divided by 10000.0,
            # presumably a unit rescaling for visualization — confirm.
            l[counter, 3] = mean / 10000.0
            counter += 1

file_name = "data_list.csv"
np.savetxt(file_name, l, delimiter=",", header='x coord, y coord, z_coord, scalar')

print("==================================================")
print("data cube written in 'data_list.csv'; you can use paraview to visualize it")
print("END")
print("==================================================")
print("==================================================")

The result is a three-dimensional model stored in CSV format.