@@ -687,8 +687,8 @@ letters before `c`:
  | var view = db._createView("view", "arangosearch",
      { links: { test: { analyzers: ["collation_en", "collation_sv"], includeAllFields: true }}});
  ~ assert(db._query(`FOR d IN view COLLECT WITH COUNT INTO c RETURN c`).toArray()[0] === 4);
- db._query("FOR doc IN view SEARCH ANALYZER(doc.text < TOKENS('c', 'collation_en')[0], 'collation_en') RETURN doc.text");
- db._query("FOR doc IN view SEARCH ANALYZER(doc.text < TOKENS('c', 'collation_sv')[0], 'collation_sv') RETURN doc.text");
+ db._query("FOR doc IN view SEARCH ANALYZER(doc.text < TOKENS('c', 'collation_en')[0], 'collation_en') RETURN doc.text").toArray();
+ db._query("FOR doc IN view SEARCH ANALYZER(doc.text < TOKENS('c', 'collation_sv')[0], 'collation_sv') RETURN doc.text").toArray();
  ~ db._dropView(view.name());
  ~ db._drop(test.name());
  ~ analyzers.remove(en.name);
@@ -781,8 +781,8 @@ Concatenating Analyzer for conditionally adding a custom prefix or suffix:
  | var a = analyzers.save("concat", "aql", { queryString:
  |   "RETURN LOWER(LEFT(@param, 5)) == 'inter' ? CONCAT(@param, 'ism') : CONCAT('inter', @param)"
      }, []);
- db._query("RETURN TOKENS('state', 'concat')");
- db._query("RETURN TOKENS('international', 'concat')");
+ db._query("RETURN TOKENS('state', 'concat')").toArray();
+ db._query("RETURN TOKENS('international', 'concat')").toArray();
  ~ analyzers.remove(a.name);
  @END_EXAMPLE_ARANGOSH_OUTPUT
  @endDocuBlock analyzerAqlConcat
@@ -799,8 +799,8 @@ with `keepNull: false` and explicitly returning `null`:
  | var a = analyzers.save("filter", "aql", { keepNull: false, queryString:
  |   "RETURN LOWER(LEFT(@param, 2)) == 'ir' ? null : @param"
      }, []);
- db._query("RETURN TOKENS('regular', 'filter')");
- db._query("RETURN TOKENS('irregular', 'filter')");
+ db._query("RETURN TOKENS('regular', 'filter')").toArray();
+ db._query("RETURN TOKENS('irregular', 'filter')").toArray();
  ~ analyzers.remove(a.name);
  @END_EXAMPLE_ARANGOSH_OUTPUT
  @endDocuBlock analyzerAqlFilterNull
@@ -824,7 +824,7 @@ without `keepNull: false`:
  | var view = db._createView("view", "arangosearch",
      { links: { coll: { fields: { value: { analyzers: ["filter"] }}}}})
  ~ assert(db._query(`FOR d IN view COLLECT WITH COUNT INTO c RETURN c`).toArray()[0] > 0);
- db._query("FOR doc IN view SEARCH ANALYZER(doc.value IN ['regular', 'irregular'], 'filter') RETURN doc");
+ db._query("FOR doc IN view SEARCH ANALYZER(doc.value IN ['regular', 'irregular'], 'filter') RETURN doc").toArray();
  ~ db._dropView(view.name())
  ~ analyzers.remove(a.name);
  ~ db._drop(coll.name());
@@ -995,7 +995,7 @@ with either of the stop words `and` and `the`:
  | var a = analyzers.save("stop", "stopwords", {
  |   stopwords: ["616e64","746865"], hex: true
      }, []);
- db._query("RETURN FLATTEN(TOKENS(SPLIT('the fox and the dog and a theater', ' '), 'stop'))");
+ db._query("RETURN FLATTEN(TOKENS(SPLIT('the fox and the dog and a theater', ' '), 'stop'))").toArray();
  ~ analyzers.remove(a.name);
  @END_EXAMPLE_ARANGOSH_OUTPUT
  @endDocuBlock analyzerStopwords
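A note on the `hex: true` option used in this example: it makes the `stopwords` Analyzer read its stop word list as hex-encoded byte strings, so `616e64` and `746865` stand for `and` and `the` (as the surrounding documentation states). A minimal plain JavaScript sketch to decode such ASCII hex entries, runnable in arangosh or any JavaScript shell; it is my own illustration and not part of the documentation example:

    // Decode hex-encoded stop words back to plain strings (ASCII bytes only).
    ["616e64", "746865"].map(function (hex) {
      var word = "";
      for (var i = 0; i < hex.length; i += 2) {
        word += String.fromCharCode(parseInt(hex.substr(i, 2), 16));
      }
      return word;
    });
    // => ["and", "the"]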
@@ -1013,7 +1013,7 @@ lower-case and base characters) and then discards the stopwords `and` and `the`:
  |   { type: "norm", properties: { locale: "en", accent: false, case: "lower" } },
  |   { type: "stopwords", properties: { stopwords: ["and","the"], hex: false } },
      ]}, []);
- db._query("RETURN FLATTEN(TOKENS(SPLIT('The fox AND the dog äñḏ a ţhéäter', ' '), 'norm_stop'))");
+ db._query("RETURN FLATTEN(TOKENS(SPLIT('The fox AND the dog äñḏ a ţhéäter', ' '), 'norm_stop'))").toArray();
  ~ analyzers.remove(a.name);
  @END_EXAMPLE_ARANGOSH_OUTPUT
  @endDocuBlock analyzerPipelineStopwords
@@ -1075,7 +1075,7 @@ Create different `segmentation` Analyzers to show the behavior of the different
10751075 | "alpha": TOKENS(str, 'segment_alpha'),
10761076 | "graphic": TOKENS(str, 'segment_graphic'),
10771077 | }
1078- `);
1078+ `).toArray() ;
10791079 ~ analyzers.remove(all.name);
10801080 ~ analyzers.remove(alpha.name);
10811081 ~ analyzers.remove(graphic.name);
@@ -1120,7 +1120,7 @@ Create a `minhash` Analyzer:
  | RETURN {
  |   approx: JACCARD(TOKENS(str1, "minhash5"), TOKENS(str2, "minhash5")),
  |   actual: JACCARD(TOKENS(str1, "segment"), TOKENS(str2, "segment"))
- }`);
+ }`).toArray();
  ~ analyzers.remove(analyzerMinHash.name);
  ~ analyzers.remove(analyzerSegment.name);
  @END_EXAMPLE_ARANGOSH_OUTPUT
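The MinHash example above contrasts `approx`, a Jaccard value computed on the tokens of the `minhash5` Analyzer, with `actual`, the Jaccard value computed on the plain segmentation tokens. As a reminder of what that exact value measures, here is a minimal plain JavaScript sketch of the Jaccard index over two token arrays; it uses made-up input and is only an illustration, not the ArangoDB implementation:

    // Jaccard index: |A ∩ B| / |A ∪ B| over the distinct tokens of both arrays.
    function jaccard(a, b) {
      var A = new Set(a), B = new Set(b);
      var intersection = 0;
      A.forEach(function (t) { if (B.has(t)) { intersection++; } });
      var union = A.size + B.size - intersection;
      return union === 0 ? 1 : intersection / union; // treat two empty sets as identical
    }
    jaccard(["a", "quick", "brown", "fox"], ["the", "quick", "brown", "fox"]); // => 0.6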
@@ -1339,13 +1339,13 @@ longitude, latitude order:
  @EXAMPLE_ARANGOSH_OUTPUT{analyzerGeoJSON}
  var analyzers = require("@arangodb/analyzers");
  var a = analyzers.save("geo_json", "geojson", {}, []);
- db._create("geo");
+ var coll = db._create("geo");
  | var docs = db.geo.save([
  |   { location: { type: "Point", coordinates: [6.937, 50.932] } },
  |   { location: { type: "Point", coordinates: [6.956, 50.941] } },
  |   { location: { type: "Point", coordinates: [6.962, 50.932] } },
      ]);
- | db._createView("geo_view", "arangosearch", {
+ | var view = db._createView("geo_view", "arangosearch", {
  |   links: {
  |     geo: {
  |       fields: {
@@ -1446,13 +1446,13 @@ longitude, latitude order:
  @EXAMPLE_ARANGOSH_OUTPUT{analyzerGeoS2}
  var analyzers = require("@arangodb/analyzers");
  var a = analyzers.save("geo_efficient", "geo_s2", { format: "latLngInt" }, []);
- db._create("geo");
+ var coll = db._create("geo");
  | var docs = db.geo.save([
  |   { location: { type: "Point", coordinates: [6.937, 50.932] } },
  |   { location: { type: "Point", coordinates: [6.956, 50.941] } },
  |   { location: { type: "Point", coordinates: [6.962, 50.932] } },
      ]);
- | db._createView("geo_view", "arangosearch", {
+ | var view = db._createView("geo_view", "arangosearch", {
  |   links: {
  |     geo: {
  |       fields: {
@@ -1545,13 +1545,13 @@ The stored coordinate pairs are in latitude, longitude order, but `GEO_POINT()`
  @EXAMPLE_ARANGOSH_OUTPUT{analyzerGeoPointPair}
  var analyzers = require("@arangodb/analyzers");
  var a = analyzers.save("geo_pair", "geopoint", {}, []);
- db._create("geo");
+ var coll = db._create("geo");
  | var docs = db.geo.save([
  |   { location: [50.932, 6.937] },
  |   { location: [50.941, 6.956] },
  |   { location: [50.932, 6.962] },
      ]);
- | db._createView("geo_view", "arangosearch", {
+ | var view = db._createView("geo_view", "arangosearch", {
  |   links: {
  |     geo: {
  |       fields: {
@@ -1589,13 +1589,13 @@ Then query for locations that are within a 3 kilometer radius of a given point:
  |   latitude: ["lat"],
  |   longitude: ["lng"]
      }, []);
- db._create("geo");
+ var coll = db._create("geo");
  | var docs = db.geo.save([
  |   { location: { lat: 50.932, lng: 6.937 } },
  |   { location: { lat: 50.941, lng: 6.956 } },
  |   { location: { lat: 50.932, lng: 6.962 } },
      ]);
- | db._createView("geo_view", "arangosearch", {
+ | var view = db._createView("geo_view", "arangosearch", {
  |   links: {
  |     geo: {
  |       fields: {