org.bigml.binding.laminar.MathOps
An open source Java client that gives you a simple binding to interact with BigML. You can use it to
easily create, retrieve, list, update, and delete BigML resources.
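As a quick orientation, here is a minimal usage sketch of the binding itself. BigMLClient is the client class this library ships, but the no-argument constructor and the createSource helper below follow the project README from memory; treat their exact signatures as assumptions that vary across versions.

import org.bigml.binding.BigMLClient;
import org.json.simple.JSONObject;

public class QuickStart {
    public static void main(String[] args) throws Exception {
        // Assumption: credentials are picked up from BIGML_USERNAME /
        // BIGML_API_KEY or binding.properties, as described in the README.
        BigMLClient api = new BigMLClient();
        JSONObject source = api.createSource("data/iris.csv",
                "Iris source", null, null);
        System.out.println(source.get("resource"));
    }
}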
package org.bigml.binding.laminar;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
/**
 * Activation functions and helpers
 *
 */
public class MathOps {
    // This can be any x where np.exp(x) + 1 == np.exp(x). Going up to 512
    // isn't strictly necessary, but hey, why not?
    private static final int LARGE_EXP = 512;
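    // Note: in IEEE-754 double precision, exp(x) + 1 == exp(x) already holds
    // for x above ~37 (where exp(x) exceeds 2^53), and exp(x) only overflows
    // past ~709, so 512 is a comfortably safe cutoff.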
    // Element-wise arithmetic between each row of mat and the vector vec.
    private static ArrayList<List<Double>> operation(
            String operator, ArrayList<List<Double>> mat, JSONArray vec) {
        ArrayList<List<Double>> out = new ArrayList<List<Double>>();
        for (int i=0; i<mat.size(); i++) {
            List<Double> row = (List<Double>) mat.get(i);
            List<Double> newRow = new ArrayList<Double>();
            for (int k=0; k<row.size(); k++) {
                Double rowValue = row.get(k);
                Double vecValue = ((Number) vec.get(k)).doubleValue();
                if ("+".equals(operator)) newRow.add(rowValue + vecValue);
                else if ("-".equals(operator)) newRow.add(rowValue - vecValue);
                else if ("*".equals(operator)) newRow.add(rowValue * vecValue);
                else if ("/".equals(operator)) newRow.add(rowValue / vecValue);
            }
            out.add(newRow);
        }
        return out;
    }

    private static ArrayList<List<Double>> plus(
            ArrayList<List<Double>> mat, JSONArray vec) {
        return operation("+", mat, vec);
    }
    private static ArrayList<List<Double>> minus(
            ArrayList<List<Double>> mat, JSONArray vec) {
        return operation("-", mat, vec);
    }
    private static ArrayList<List<Double>> times(
            ArrayList<List<Double>> mat, JSONArray vec) {
        return operation("*", mat, vec);
    }
    private static ArrayList<List<Double>> divide(
            ArrayList<List<Double>> mat, JSONArray vec) {
        return operation("/", mat, vec);
    }
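    // dot computes the matrix product of the activations with a layer's
    // weights: out[i][j] = sum_k mat1[i][k] * mat2[j][k], i.e. each row of
    // mat2 holds the incoming weights of one output unit.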
    private static ArrayList<List<Double>> dot(
            ArrayList<List<Double>> mat1, JSONArray mat2) {
        ArrayList<List<Double>> outMat = new ArrayList<List<Double>>();
        for (int i=0; i<mat1.size(); i++) {
            List<Double> row1 = (List<Double>) mat1.get(i);
            List<Double> newRow = new ArrayList<Double>();
            for (int j=0; j<mat2.size(); j++) {
                List<Double> row2 = (List<Double>) mat2.get(j);
                double sum = 0.0;
                for (int k=0; k<row1.size(); k++) {
                    sum += row1.get(k) * row2.get(k);
                }
                newRow.add(sum);
            }
            outMat.add(newRow);
        }
        return outMat;
    }

    private static ArrayList<List<Double>> batchNorm(
            ArrayList<List<Double>> mat, JSONArray mean,
            JSONArray stdev, JSONArray shift, JSONArray scale) {
        ArrayList<List<Double>> normVals = divide(minus(mat, mean), stdev);
        return plus(times(normVals, scale), shift);
    }
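    // batchNorm above is the usual inference-time normalization applied
    // column-wise:
    //   out = scale * ((x - mean) / stdev) + shift
    // e.g. x=2.0, mean=1.0, stdev=0.5, scale=2.0, shift=0.1 gives
    // 2.0 * ((2.0 - 1.0) / 0.5) + 0.1 = 4.1.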
    public static ArrayList<List<Double>> destandardize(
            ArrayList<List<Double>> vec, Double mean, Double stdev) {
        ArrayList<List<Double>> out = new ArrayList<List<Double>>();
        for (int i=0; i<vec.size(); i++) {
            List<Double> row = (List<Double>) vec.get(i);
            List<Double> newRow = new ArrayList<Double>();
            for (int k=0; k<row.size(); k++) {
                // Undo standardization: x * stdev + mean.
                newRow.add(row.get(k) * stdev + mean);
            }
            out.add(newRow);
        }
        return out;
    }

    private static ArrayList<List<Double>> toWidth(
            ArrayList<List<Double>> mat, int width) {
        int ntiles = 1;
        if (width > mat.get(0).size()) {
            // Use floating-point division here: plain integer division
            // truncates before Math.ceil and can under-tile the row.
            ntiles = (int) Math.ceil(width / (double) mat.get(0).size());
        }
        ArrayList<List<Double>> output = new ArrayList<List<Double>>();
        for (List<Double> row: mat) {
            List<Double> newRow = new ArrayList<Double>();
            for (int i=0; i<ntiles; i++) {
                newRow.addAll(row);
            }
            // Tile the row, then trim it to exactly the requested width.
            output.add(newRow.subList(0, width));
        }
        return output;
    }

    private static ArrayList<List<Double>> addResiduals(
            ArrayList<List<Double>> residuals,
            ArrayList<List<Double>> identities) {
        ArrayList<List<Double>> output = new ArrayList<List<Double>>();
        ArrayList<List<Double>> toAdd =
            toWidth(identities, residuals.get(0).size());
        for (int i=0; i<residuals.size(); i++) {
            List<Double> residualRow = (List<Double>) residuals.get(i);
            List<Double> toAddRow = (List<Double>) toAdd.get(i);
            List<Double> newRow = new ArrayList<Double>();
            for (int j=0; j<residualRow.size(); j++) {
                newRow.add(residualRow.get(j) + toAddRow.get(j));
            }
            output.add(newRow);
        }
        return output;
    }

    public static ArrayList<List<Double>> propagate(
            ArrayList<List<Double>> input, JSONArray layers) {
        ArrayList<List<Double>> identities = input;
        ArrayList<List<Double>> lastX = input;
        for (Object layerObj: layers) {
            JSONObject layer = (JSONObject) layerObj;
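            // Each layer carries a weight matrix, optional batch-norm
            // statistics (mean/stdev plus scale/offset), an optional
            // residual-connection flag, and an activation function name.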
            JSONArray weights = (JSONArray) layer.get("weights");
            JSONArray mean = (JSONArray) layer.get("mean");
            JSONArray stdev = (JSONArray) layer.get("stdev");
            JSONArray scale = (JSONArray) layer.get("scale");
            JSONArray offset = (JSONArray) layer.get("offset");
            Boolean residuals = (Boolean) layer.get("residuals");
            String afn = (String) layer.get("activation_function");

            ArrayList<List<Double>> nextIn = dot(lastX, weights);
            if (mean != null && stdev != null) {
                nextIn = batchNorm(nextIn, mean, stdev, offset, scale);
            } else {
                nextIn = plus(nextIn, offset);
            }
            if (residuals != null && residuals) {
                nextIn = addResiduals(nextIn, identities);
                lastX = broadcast(afn, nextIn);
                identities = lastX;
            } else {
                lastX = broadcast(afn, nextIn);
            }
        }
        return lastX;
    }
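    // In short, each layer of propagate computes
    //   lastX = activation(batchNorm(lastX . W) [+ residuals]),
    // where batch normalization falls back to a plain offset when no
    // mean/stdev statistics are present.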
    private static ArrayList<List<Double>> broadcast(
            String afn, ArrayList<List<Double>> xs) {
        ArrayList<List<Double>> result = new ArrayList<List<Double>>();
        if (xs.size() == 0) {
            return result;
        }
        if ("identity".equals(afn)) {
            return xs;
        }
        if ("softmax".equals(afn)) {
            return softmax(xs);
        }
        for (List<Double> row: xs) {
            List<Double> newRow = new ArrayList<Double>();
            for (Double d: row) {
                if ("tanh".equals(afn)) {
                    newRow.add(Math.tanh(d));
                }
                if ("sigmoid".equals(afn)) {
                    if (d > 0) {
                        if (d < LARGE_EXP) {
                            double exVal = Math.exp(d);
                            newRow.add(exVal / (exVal + 1));
                        } else {
                            newRow.add(1.0);
                        }
                    } else {
                        if (-d < LARGE_EXP) {
                            newRow.add(1 / (1 + Math.exp(-d)));
                        } else {
                            newRow.add(0.0);
                        }
                    }
                }
                if ("softplus".equals(afn)) {
                    newRow.add(d < LARGE_EXP ? Math.log(Math.exp(d) + 1) : d);
                }
                if ("relu".equals(afn)) {
                    newRow.add(d > 0 ? d : 0.0);
                }
            }
            result.add(newRow);
        }
        return result;
    }
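    // The sigmoid branches above are the standard numerically stable split:
    // arguments to Math.exp are capped at LARGE_EXP = 512, well below the
    // double overflow point near 710, and values beyond the cap saturate to
    // exactly 1.0 or 0.0.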
    private static ArrayList<List<Double>> softmax(
            ArrayList<List<Double>> xs) {
        double max = 0.0;
        for (List<Double> row: xs) {
            double maxRow = Collections.max(row);
            max = maxRow > max ? maxRow : max;
        }
        ArrayList<List<Double>> exps = new ArrayList<List<Double>>();
        for (List<Double> row: xs) {
            List<Double> newRow = new ArrayList<Double>();
            for (Double d: row) {
                newRow.add(Math.exp(d - max));
            }
            exps.add(newRow);
        }
        double sumex = 0.0;
        for (List<Double> exp: exps) {
            for (Double d: exp) {
                sumex += d;
            }
        }
        ArrayList<List<Double>> result = new ArrayList<List<Double>>();
        for (List<Double> exp: exps) {
            List<Double> newRow = new ArrayList<Double>();
            for (Double d: exp) {
                newRow.add(d / sumex);
            }
            result.add(newRow);
        }
        return result;
    }
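    // softmax subtracts the global maximum before exponentiating (so exp
    // cannot overflow) and normalizes by the sum over all entries; with the
    // single-row inputs propagate produces, this matches the usual per-row
    // softmax.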
    public static ArrayList<List<Double>> sumAndNormalize(
            ArrayList<ArrayList<List<Double>>> inputs, boolean isRegression) {
        ArrayList<List<Double>> first = (ArrayList<List<Double>>) inputs.get(0);
        Double[] ysums = new Double[first.get(0).size()];
        for (int j=0; j<first.get(0).size(); j++) {
            ysums[j] = 0.0;
            for (Object inputObj: inputs) {
                ArrayList<List<Double>> input = (ArrayList<List<Double>>) inputObj;
                for (int k=0; k<input.size(); k++) {
                    ysums[j] += input.get(k).get(j);
                }
            }
        }
        ArrayList<List<Double>> outDist = new ArrayList<List<Double>>();
        List<Double> dist = new ArrayList<Double>();
        double sum = 0.0;
        for (int j=0; j<ysums.length; j++) {
            sum += ysums[j];
        }
        // Regression models average the per-model outputs; classifiers
        // renormalize the summed distribution so it adds up to 1.
        for (int j=0; j<ysums.length; j++) {
            dist.add(isRegression ? ysums[j] / inputs.size() : ysums[j] / sum);
        }
        outDist.add(dist);
        return outDist;
    }
}
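For illustration, here is a hedged, minimal sketch of driving MathOps.propagate directly with json-simple structures. The layer layout (a JSONArray of weight rows, one per output unit, plus an offset vector and an activation_function name) follows the code above; the toy identity network itself is an assumption for demonstration, not a BigML fixture.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.bigml.binding.laminar.MathOps;

public class MathOpsExample {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        // One input row with two features.
        ArrayList<List<Double>> input = new ArrayList<List<Double>>();
        input.add(new ArrayList<Double>(Arrays.asList(1.0, -2.0)));

        // A single dense layer: 2 inputs -> 2 outputs with identity weights,
        // zero offsets, no batch norm, and a relu activation.
        JSONArray row1 = new JSONArray();
        row1.addAll(Arrays.asList(1.0, 0.0));
        JSONArray row2 = new JSONArray();
        row2.addAll(Arrays.asList(0.0, 1.0));
        JSONArray weights = new JSONArray();
        weights.add(row1);
        weights.add(row2);
        JSONArray offset = new JSONArray();
        offset.addAll(Arrays.asList(0.0, 0.0));

        JSONObject layer = new JSONObject();
        layer.put("weights", weights);
        layer.put("offset", offset);
        layer.put("activation_function", "relu");

        JSONArray layers = new JSONArray();
        layers.add(layer);

        // relu(identity . [1.0, -2.0]) -> prints [[1.0, 0.0]]
        System.out.println(MathOps.propagate(input, layers));
    }
}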