Loading TOC...

cntk:dense-layer

cntk:dense-layer(
   $operand as cntk:variable,
   $addition-parameters as map:map
) as cntk:function

Summary

Creates a dense layer and applies it to the operand. A dense layer is a fully connected layer of the form y = Wx + b, where x is the input, y is the output, W is the weight, and b is the bias.

Parameters
$operand The operand of the operation.
$addition-parameters

"output-shape": cntk:shape. The shape of the output of this layer.

"activation": xs:string. Default: "identity". The optional activation to apply at the end.

"init": xs:double, or cntk:value, or cntk:initializer. Default: cntk:glorot-uniform-initializer(). Initial value of weights W.

"input-rank": xs:integer. Number of inferred axes to add to W (map-rank must not be given).

"map-rank": xs:integer. Expand W to leave exactly map-rank axes (input-rank must not be given).

"bias": xs:boolean. Default: true. If set to false, the layer will have no bias.

"bias-init": xs:double, or cntk:initializer. Default: 0. Initial value of bias.

"name": xs:string. Default: ""

Example

  xquery version "1.0-ml";
  (: Example: convolve a synthetic input batch, project the result through a
     dense layer, and run a single SGD training minibatch. Returns true. :)
  let $num-classes := 2
  let $num-samples :=5
  (: Input tensor shape: 64 x 28 with 3 channels :)
  let $input-shape := cntk:shape((64,28,3))
  let $input-variable := cntk:input-variable($input-shape, "float")
  (: Convolution options: 3x3 filters, 10 output feature maps, no auto-padding :)
  let $convolution-option := map:map()=>
                            map:with("filter-shape", (3,3))=>
                            map:with("num-filters", 10)=>
                            map:with("auto-padding", fn:false())
  (: Convolution weight parameter W, Glorot-uniform initialized :)
  let $W := cntk:parameter(cntk:shape((3,3,3,10)), "float", cntk:glorot-uniform-initializer())
  let $convolved-variable := cntk:convolution($W, $input-variable, $convolution-option)
  (: Dense layer mapping the convolved output down to $num-classes outputs :)
  let $dense-option := map:map()=>
                      map:with("output-shape", cntk:shape(($num-classes)))
  let $dense-output := cntk:dense-layer($convolved-variable, $dense-option)
  (: Synthetic input batch: one value per input element across all samples
     (64*28*3 elements per sample, $num-samples samples) :)
  let $input-value-array := json:to-array((1 to 64*28*3*$num-samples))
  let $input-value := cntk:batch($input-shape, $input-value-array)
  (: Labels: 10 values = $num-samples pairs of $num-classes indicator values :)
  let $label-shape := cntk:shape(($num-classes))
  let $label-variable := cntk:input-variable($label-shape, "float")
  let $label-array := json:to-array((1,0,0,1,0,1,1,0,0,1))
  let $label-value := cntk:batch($label-shape, $label-array)
  (: SGD learner over the network's parameters with a constant learning rate of 0.1 :)
  let $learner := cntk:sgd-learner((cntk:function-parameters($dense-output)), cntk:learning-rate-schedule-from-constant(0.1))
  (: Cross-entropy-with-softmax loss computed along axis -1 :)
  let $loss := cntk:cross-entropy-with-softmax($dense-output, $label-variable, cntk:axis(-1))
  let $trainer := cntk:trainer($dense-output, ($learner), $loss)
  (: Bind each input variable to its batched value, then train one minibatch :)
  let $input-pair := json:to-array(($input-variable, $input-value))
  let $label-pair := json:to-array(($label-variable, $label-value))
  let $minibatch := json:to-array(($input-pair, $label-pair))
  return cntk:train-minibatch($trainer, $minibatch, fn:false())
  => true

Stack Overflow: Get the most useful answers to questions from the MarkLogic community, or ask your own question.