cntk:convolution

cntk:convolution(
   $convolution-map as cntk:variable,
   $operand as cntk:variable,
   $additional-parameters as map:map
) as cntk:function

Summary

Computes the convolution of $convolution-map (typically a tensor of learnable parameters) with $operand (commonly an image or the output of a previous convolution or pooling operation). This operation is widely used in image and language processing applications. It supports arbitrary dimensions, strides, sharing, and padding.

Parameters
$convolution-map Convolution filter weights, stored as a tensor of dimensions [O×I×m1×m2×…×mn], where O is the number of output feature maps, I is the number of input channels, and [m1×m2×…×mn] are the kernel dimensions (the spatial extent of the filter).
$operand Convolution input. A tensor with dimensions [I×M1×M2×…×Mn].
$additional-parameters A map:map of additional options controlling the convolution, such as the padding behavior. The example below passes the keys "filter-shape", "num-filters", and "auto-padding"; see also the sketch that follows this list.
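
The relationship between the two tensors is easiest to see in a minimal call. The following sketch is illustrative only: it assumes the same shape ordering as the full example below, and that an options map containing just "auto-padding" is accepted. It convolves a 5×5 kernel with 3 input channels and 8 output filters over a 32×32, 3-channel input.

  xquery version "1.0-ml";
  (: operand [I x M1 x M2]: a 3-channel 32x32 input, written as cntk:shape((32,32,3)) :)
  let $operand := cntk:input-variable(cntk:shape((32,32,3)), "float")
  (: convolution map [O x I x m1 x m2]: 8 filters, 3 input channels, 5x5 kernel,
     written as cntk:shape((5,5,3,8)) to match the ordering used in the example below :)
  let $kernel := cntk:parameter(cntk:shape((5,5,3,8)), "float", cntk:glorot-uniform-initializer())
  let $options := map:map()=>
                  map:with("auto-padding", fn:false())
  return cntk:convolution($kernel, $operand, $options)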

Example

  xquery version "1.0-ml";
  let $num-classes := 2
  let $num-samples := 5
  (: Input: 64x28 images with 3 channels :)
  let $input-shape := cntk:shape((64,28,3))
  let $input-variable := cntk:input-variable($input-shape, "float")
  let $convolution-option := map:map()=>
                            map:with("filter-shape", (3,3))=>
                            map:with("num-filters", 10)=>
                            map:with("auto-padding", fn:false())
  (: Convolution map: 3x3 kernel, 3 input channels, 10 output filters :)
  let $W := cntk:parameter(cntk:shape((3,3,3,10)), "float", cntk:glorot-uniform-initializer())
  let $convolved-variable := cntk:convolution($W, $input-variable, $convolution-option)
  (: Dense layer projecting the convolution output to $num-classes outputs :)
  let $dense-option := map:map()=>
                      map:with("output-shape", cntk:shape(($num-classes)))
  let $dense-output := cntk:dense-layer($convolved-variable, $dense-option)
  (: A batch of five input samples and their one-hot labels :)
  let $input-value-array := json:to-array((1 to 64*28*3*$num-samples))
  let $input-value := cntk:batch($input-shape, $input-value-array)
  let $label-shape := cntk:shape(($num-classes))
  let $label-variable := cntk:input-variable($label-shape, "float")
  let $label-array := json:to-array((1,0,0,1,0,1,1,0,0,1))
  let $label-value := cntk:batch($label-shape, $label-array)
  (: SGD learner, cross-entropy loss, and trainer over the network parameters :)
  let $learner := cntk:sgd-learner((cntk:function-parameters($dense-output)), cntk:learning-rate-schedule-from-constant(0.1))
  let $loss := cntk:cross-entropy-with-softmax($dense-output, $label-variable, cntk:axis(-1))
  let $trainer := cntk:trainer($dense-output, ($learner), $loss)
  (: Bind each input variable to its batch value and run one training step :)
  let $input-pair := json:to-array(($input-variable, $input-value))
  let $label-pair := json:to-array(($label-variable, $label-value))
  let $minibatch := json:to-array(($input-pair, $label-pair))
  return cntk:train-minibatch($trainer, $minibatch, fn:false())
  => true
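
This example builds a small network (a convolution followed by a dense layer), defines a softmax cross-entropy loss over two classes, and runs a single cntk:train-minibatch step with an SGD learner over a batch of five samples. The query returns the boolean result of the training step, shown as true above.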
