author     Eric Kunze <eric.kunze@arm.com>  2021-03-11 15:38:22 -0800
committer  Eric Kunze <eric.kunze@arm.com>  2021-03-11 15:39:53 -0800
commit     839830a72ebb27c4a818e15d3334e79085906dc8 (patch)
tree       9999a6fc050d75122eb0a46a215558c0665d6e91 /chapters/ewise_binary.adoc
parent     8e4a9d33f0527107fda724fc0f7b6b7c1f42bf79 (diff)
download   specification-839830a72ebb27c4a818e15d3334e79085906dc8.tar.gz
Make pseudocode block tags consistent
They are all now [source,c++] and use ---- as the delimiter. Also made the
pseudocode indentation consistent (4 spaces) in the introduction sections.

Signed-off-by: Eric Kunze <eric.kunze@arm.com>
Change-Id: I1287a60cfeae8b7e3c63b9fc8885b1ebf3d3386d
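For reference, a minimal sketch of the block convention this commit standardizes, assembled from the hunks below (the pseudocode body is elided with "..."; only the [source,c++] tag line and the ---- delimiters are what the change touches):

    *Operation Function:*

    [source,c++]
    ----
    for_each(index in shape) {
        index1 = apply_broadcast(shape, shape1, index);
        ...
    }
    ----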
Diffstat (limited to 'chapters/ewise_binary.adoc')
-rw-r--r--  chapters/ewise_binary.adoc  30
1 file changed, 15 insertions(+), 15 deletions(-)
diff --git a/chapters/ewise_binary.adoc b/chapters/ewise_binary.adoc
index 2b8d321..c005611 100644
--- a/chapters/ewise_binary.adoc
+++ b/chapters/ewise_binary.adoc
@@ -26,7 +26,7 @@ Axis of size 1 will be broadcast, as necessary. Rank of input tensors must match
*Operation Function:*
-[source,c]
+[source,c++]
----
for_each(index in shape) {
index1 = apply_broadcast(shape, shape1, index);
@@ -64,7 +64,7 @@ Axis of size 1 will be broadcast, as necessary. Rank of input tensors must match
*Operation Function:*
-[source,c]
+[source,c++]
----
for_each(index in shape) {
index1 = apply_broadcast(shape, shape1, index);
@@ -108,7 +108,7 @@ Axis of size 1 will be broadcast as necessary. Rank of input tensors must match.
*Operation Function:*
-[source,c]
+[source,c++]
----
for_each(index in shape) {
index1 = apply_broadcast(shape, shape1, index);
@@ -147,7 +147,7 @@ Axis of size 1 will be broadcast as necessary. Rank of input tensors must match.
*Operation Function:*
-[source,c]
+[source,c++]
----
for_each(index in shape) {
index1 = apply_broadcast(shape, shape1, index);
@@ -186,7 +186,7 @@ Axis of size 1 will be broadcast as necessary. Rank of input tensors must match.
*Operation Function:*
-[source,c]
+[source,c++]
----
for_each(index in shape) {
index1 = apply_broadcast(shape, shape1, index);
@@ -229,7 +229,7 @@ None
*Operation Function:*
-[source,c]
+[source,c++]
----
for_each(index in shape) {
index1 = apply_broadcast(shape, shape1, index);
@@ -266,7 +266,7 @@ Axis of size 1 will be broadcast, as necessary. Rank of input tensors must match
*Operation Function:*
-[source,c]
+[source,c++]
----
for_each(index in shape) {
index1 = apply_broadcast(shape, shape1, index);
@@ -306,7 +306,7 @@ Axis of size 1 will be broadcast, as necessary. Rank of input tensors must match
*Operation Function:*
-[source,c]
+[source,c++]
----
for_each(index in shape) {
index1 = apply_broadcast(shape, shape1, index);
@@ -346,7 +346,7 @@ Axis of size 1 will be broadcast as necessary. Rank of input tensors must match.
*Operation Function:*
-[source,c]
+[source,c++]
----
for_each(index in shape) {
index1 = apply_broadcast(shape, shape1, index);
@@ -383,7 +383,7 @@ Axis of size 1 will be broadcast as necessary. Rank of input tensors must match.
*Operation Function:*
-[source,c]
+[source,c++]
----
for_each(index in shape) {
index1 = apply_broadcast(shape, shape1, index);
@@ -420,7 +420,7 @@ Axis of size 1 will be broadcast, as necessary. Rank of input tensors must match
*Operation Function:*
-[source,c]
+[source,c++]
----
for_each(index in shape) {
index1 = apply_broadcast(shape, shape1, index);
@@ -458,7 +458,7 @@ Axis of size 1 will be broadcast, as necessary. Rank of input tensors must match
*Operation Function:*
-[source,c]
+[source,c++]
----
for_each(index in shape) {
index1 = apply_broadcast(shape, shape1, index);
@@ -497,7 +497,7 @@ Axis of size 1 will be broadcast, as necessary. Rank of input tensors must match
*Operation Function:*
-[source,c]
+[source,c++]
----
assert(in_t == int32_t || shift == 0);
for_each(index in shape) {
@@ -568,7 +568,7 @@ Axis of size 1 will be broadcast as necessary. Rank of input tensors must match.
*Operation Function:*
-[source,c]
+[source,c++]
----
for_each(index in shape) {
index1 = apply_broadcast(shape, shape1, index);
@@ -621,7 +621,7 @@ None
*Operation Function:*
-[source,c]
+[source,c++]
----
for_each(index in shape) {
in_t value = tensor_read<in_t>(input, shape, index);