Skip to content
Merged
Changes from 1 commit
Commits
Show all changes
54 commits
Select commit Hold shift + click to select a range
ef0ce67
Initial checkin of Keras Optimzers and helper classes.
JimClarke5 Jul 28, 2020
9c113a7
Added static final NAME to replace hardcoded String in the create met…
JimClarke5 Aug 20, 2020
824d487
Changed of method to use the DataType NAME attribute rather than hard…
JimClarke5 Aug 20, 2020
07a83a5
Added method WriteFieldWithInitializer to output a "final static Stri…
JimClarke5 Aug 20, 2020
3d26831
Added tf.nn.softmaxCrossEntropyWitLogits() and tf.nn.raw.softmaxCross…
JimClarke5 Aug 20, 2020
11cda5f
Moved SoftmaxCrossEntropyWithLogits and SparseSoftmaxCrossEntropyWit…
JimClarke5 Aug 20, 2020
9c7dfaa
Generated classes now have public static final String OP_NAME = "XXXX…
JimClarke5 Aug 20, 2020
84f49db
Generated classes now have public static final String OP_NAME = "XXXX…
JimClarke5 Aug 20, 2020
208b84a
fix dependencies for other Tensorflow Java modules
JimClarke5 Aug 20, 2020
3913161
formatting fix
JimClarke5 Aug 20, 2020
b5a7c0f
Fix ctors with name to properly pass the name to the the super ctor.
JimClarke5 Aug 20, 2020
fcba0a5
change asserts to IllegalArgumentException
JimClarke5 Aug 20, 2020
960cfc3
change asserts to IllegalArgumentException
JimClarke5 Aug 20, 2020
d37298a
Moved back to tests
JimClarke5 Aug 20, 2020
c68812c
Moved SoftmaxCrossEntropyWithLogits.java and SparseSoftmaxCrossEntrop…
JimClarke5 Aug 20, 2020
6b8eb26
Deleted files that are not necessary yet
JimClarke5 Aug 20, 2020
6515c24
Added nn.raw group for softmaxCrossEntropyWithLogits() and sparseSoft…
JimClarke5 Aug 20, 2020
76d0fe5
Added nn.raw group for softmaxCrossEntropyWithLogits() and sparseSoft…
JimClarke5 Aug 20, 2020
d2201df
Merge branch 'master' into master
JimClarke5 Aug 20, 2020
ab379d1
Refactor NN into individual operations under org.tensorflow.op.nn. Fi…
JimClarke5 Sep 3, 2020
889d67e
Refactor NN into individual operations under org.tensorflow.op.nn. Fi…
JimClarke5 Sep 3, 2020
515b799
Reformatted code
JimClarke5 Sep 3, 2020
5a9fe37
Added sub scope
JimClarke5 Sep 3, 2020
8d21dd7
Miscellaneous fixes based on review comments.
JimClarke5 Sep 3, 2020
4c3cc78
Fixed op_generator.cc to remove a spurious new line in the generated …
JimClarke5 Sep 3, 2020
44f530f
Changed back to non-generic Operand until we resolve how to handle ge…
JimClarke5 Sep 3, 2020
b8d3ac2
Regenerated due to creation of SoftmaxCrossEntropyWithLogits.java, S…
JimClarke5 Sep 3, 2020
c32fc5b
change snake case to camel case. format code
JimClarke5 Sep 7, 2020
171cd2f
clean upd warning, format code
JimClarke5 Sep 7, 2020
e9c3134
Added Adamax, Ftrl, and Nadam Optimizers. Added Optimizers enum for e…
JimClarke5 Sep 9, 2020
5c30a72
Removed optimize classes from tensorflow-keras, moved optimizer test …
JimClarke5 Sep 9, 2020
ebefc2e
Fixed generics
JimClarke5 Sep 9, 2020
7915e63
Fixed from Unit test results
JimClarke5 Sep 9, 2020
ec4f679
added @SuppressWarnings("unchecked") on Variable array
JimClarke5 Sep 9, 2020
c86d09b
Merge pull request #1 from tensorflow/master
JimClarke5 Sep 18, 2020
1a670ec
Added Support for evaluating TFloat16
JimClarke5 Sep 30, 2020
0cc9b9c
Add Activations
JimClarke5 Sep 30, 2020
ca77a0b
Remove no-arg CTORs
JimClarke5 Oct 1, 2020
73091be
Fix Unit Tests to include positive and negative numbers on input.
JimClarke5 Oct 1, 2020
946d1d5
Modify JavaDoc indicating Linear activation is also known as Identity…
JimClarke5 Oct 2, 2020
7c5cc4a
Changed DEFAULT values from private to public
JimClarke5 Oct 2, 2020
e32fe44
Fixed last sum to be over 'e' instead of 'input'
JimClarke5 Oct 2, 2020
0130914
Added tests for various parameter constructs.
JimClarke5 Oct 2, 2020
c7d0477
added tests for 1D and 3D input
JimClarke5 Oct 2, 2020
de0e610
Change snake case to camel case
JimClarke5 Oct 2, 2020
63c1f00
JavaDoc fixes
JimClarke5 Oct 4, 2020
2302cc5
Add TFloating family
JimClarke5 Oct 21, 2020
4c44c62
Add JavaDoc
JimClarke5 Oct 21, 2020
ef29af9
Changed to TFloating where appropriate.
JimClarke5 Oct 21, 2020
7519436
Remove the test of int arguments for those classes changed to TFloati…
JimClarke5 Oct 21, 2020
27c1126
Remove the test of int arguments for those classes changed to TFloati…
JimClarke5 Oct 21, 2020
b83f94f
Make LeakyRelu visible so that it is included in tf.nn.
JimClarke5 Oct 22, 2020
c59e905
Remove TNumber import
JimClarke5 Oct 22, 2020
ebbcc4f
Add tf.nn.leakyRelu operation
JimClarke5 Oct 22, 2020
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Fix ctors with name to properly pass the name to the the super ctor.
  • Loading branch information
JimClarke5 committed Aug 20, 2020
commit b5a7c0f9f54acf1fb0f57003027aa0843ae67c82
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.tensorflow.keras.backend.tf.ControlDependencies;
import static org.tensorflow.keras.optimizers.OptimizerInterface.assertGraph;
import org.tensorflow.op.Op;
import org.tensorflow.op.Ops;
Expand All @@ -33,7 +32,6 @@
* <p>Two accumulation steps are required: 1) the accumulation of gradients squared, 2) the
* accumulation of updates squared.
*
* @param <U> The Type for the call operation
*/
public class AdaDelta extends org.tensorflow.framework.optimizers.AdaDelta
implements OptimizerInterface {
Expand Down Expand Up @@ -69,7 +67,7 @@ public AdaDelta(Ops tf) {
* @param name the name of the Optimizer, defaults to "Adadelta"
*/
public AdaDelta(Ops tf, String name) {
this(tf, LEARNING_RATE_DEFAULT, RHO_DEFAULT, EPSILON_DEFAULT);
this(tf, name, LEARNING_RATE_DEFAULT, RHO_DEFAULT, EPSILON_DEFAULT);
}

/**
Expand All @@ -90,7 +88,7 @@ public AdaDelta(Ops tf, float learningRate) {
* @param learningRate The learning rate
*/
public AdaDelta(Ops tf, String name, float learningRate) {
this(tf, learningRate, RHO_DEFAULT, EPSILON_DEFAULT);
this(tf, name, learningRate, RHO_DEFAULT, EPSILON_DEFAULT);
}

/**
Expand Down Expand Up @@ -129,15 +127,14 @@ protected Optional<Op> prepare(String name) {
case 1:
return Optional.of(initializers.get(0));
default:
return Optional.of(
ControlDependencies.addControlDependencies(tf, this.getOptimizerName(), initializers));
return Optional.of( tf.withSubScope(name).withControlDependencies(initializers).noOp());
}
}

/**
* Create an Adam Optimizer from a config object
*
* @param graph the tensorflow graph
* @param tf the tensorflow Ops
 * @param config a config object to initialize, the config object has keys for "name",
* "learning_rate", "rho" and "epsilon". If a key is missing the default value is used.
*/
Expand All @@ -148,7 +145,8 @@ public static AdaDelta fromConfig(Ops tf, Map<String, Object> config) {
/**
* Create an Adadelta optimizer
*
* @param graph the tensorflow graph @@param config a config object to initialize, the config
* @param tf the tensorflow Ops
* @param config a config object to initialize, the config
* object has keys for "name", "learning_rate", "rho" and "epsilon". If a key is missing the
* default value is used.
*/
Expand Down