Created April 17, 2017 14:39
(ns api-modelling-framework.parser.domain.common-test
  #?(:cljs (:require-macros [cljs.test :refer [deftest is async]]))
  (:require #?(:clj [clojure.test :refer :all])
            [api-modelling-framework.parser.domain.common :as common]
            [api-modelling-framework.model.document :as document]
            [api-modelling-framework.model.vocabulary :as v]
            [api-modelling-framework.utils :as utils]))

(deftest wrapped-ast-token?-test
  (is (common/wrapped-ast-token? {:amf-lexical-token 1}))
  (is (not (common/wrapped-ast-token? 1)))
  (is (not (common/wrapped-ast-token? {:a 1})))
  (is (not (common/wrapped-ast-token? nil)))
  (is (not (common/wrapped-ast-token? [:a :b]))))

(deftest with-ast-parsing-test
  (let [node (with-meta {:a 1} {:location true})
        result (common/with-ast-parsing node
                 (fn [{:keys [a]}] {:res (inc a)}))]
    (is (= {:res 2 :lexical {:location true}} result)))
  (let [node (with-meta {:amf-lexical-token 1} {:location true})
        result (common/with-ast-parsing node
                 (fn [a] {:res (inc a)}))]
    (is (= {:res 2 :lexical {:location true}} result)))
  (let [node {:a 1}
        result (common/with-ast-parsing node
                 (fn [{:keys [a]}] {:res (inc a)}))]
    (is (= {:res 2} result))))

(deftest ast-value-test
  (is (= 1 (common/ast-value {:amf-lexical-token 1})))
  (is (= 1 (common/ast-value 1))))

(deftest ast-get-test
  (is (= 1 (common/ast-get {:a 1} :a)))
  (is (= 1 (common/ast-get {:a {:amf-lexical-token 1}} :a)))
  (is (= 2 (common/ast-get {:a {:amf-lexical-token 1}} :b 2)))
  (is (= 2 (common/ast-get {:a 1} :b 2))))

(deftest ast-assoc-test
  (is (= {:a 1} (common/ast-assoc {} :a 1)))
  (is (= {:a 1} (common/ast-assoc {} :a {:amf-lexical-token 1}))))
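The tests above exercise small helpers for the lexical tokens that the patched js-yaml loader (see the diff below) attaches to every scalar. A minimal REPL-style sketch of how such a wrapped value looks on the ClojureScript side and how the helpers treat it; the map literal is illustrative, and keywordization of the location keys is an assumption:

;; `wrapped` mimics a scalar emitted by the patched loader after js->clj conversion
(def wrapped {:amf-lexical-token "World Music API"
              :__location__ {:start-line 2 :start-column 0 :end-line 2 :end-column 23}})

(common/wrapped-ast-token? wrapped)      ;; => true
(common/ast-value wrapped)               ;; => "World Music API"
(common/ast-get {:title wrapped} :title) ;; => "World Music API"
(common/ast-assoc {} :title wrapped)     ;; => {:title "World Music API"}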
diff --git a/js/js-support-bundle.js b/js/js-support-bundle.js
index 3c17406..99d2e01 100644
--- a/js/js-support-bundle.js
+++ b/js/js-support-bundle.js
@@ -1245,7 +1245,7 @@ function mergeMappings(state, destination, source, overridableKeys) {
function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode) {
var index, quantity;
- keyNode = String(keyNode);
+ keyNode = String(keyNode["amf-lexical-token"]);
if (_result === null) {
_result = {};
@@ -1376,6 +1376,10 @@ function readPlainScalar(state, nodeIndent, withinFlowCollection) {
_result = state.result,
ch;
+ var startLine = state.line;
+ var startColumn = state.lineStart;
+ var startIndex = state.position;
+
ch = state.input.charCodeAt(state.position);
if (is_WS_OR_EOL(ch) ||
@@ -1464,6 +1468,33 @@ function readPlainScalar(state, nodeIndent, withinFlowCollection) {
captureSegment(state, captureStart, captureEnd, false);
if (state.result) {
+ var endLine = state.line;
+ var endColumn = state.position - state.lineStart;
+ var endIndex = state.position;
+
+ var positions = {
+ "start-line": startLine,
+ "start-column": startColumn,
+ "start-index": startIndex,
+ "end-line": endLine,
+ "end-column": endColumn,
+ "end-index": endIndex
+ };
+ state.kind = _kind;
+ if (state.result === 'null') {
+ state.result = null;
+ }
+ if (state.result === 'true') {
+ state.result = true;
+ }
+ if (state.result === 'false') {
+ state.result = false;
+ }
+ state.result = {
+ "amf-lexical-token": state.result,
+ "__location__": positions
+ };
+
return true;
}
@@ -1476,6 +1507,10 @@ function readSingleQuotedScalar(state, nodeIndent) {
var ch,
captureStart, captureEnd;
+ var startLine = state.line;
+ var startColumn = state.lineStart;
+ var startIndex = state.position;
+
ch = state.input.charCodeAt(state.position);
if (ch !== 0x27/* ' */) {
@@ -1497,6 +1532,23 @@ function readSingleQuotedScalar(state, nodeIndent) {
state.position++;
captureEnd = state.position;
} else {
+ var endLine = state.line;
+ var endColumn = state.position - state.lineStart;
+ var endIndex = state.position;
+
+ var positions = {
+ "start-line": startLine,
+ "start-column": startColumn,
+ "start-index": startIndex,
+ "end-line": endLine,
+ "end-column": endColumn,
+ "end-index": endIndex
+ };
+ state.result = {
+ "amf-lexical-token": state.result,
+ "__location__": positions
+ };
+
return true;
}
@@ -1525,6 +1577,10 @@ function readDoubleQuotedScalar(state, nodeIndent) {
tmp,
ch;
+ var startLine = state.line;
+ var startColumn = state.lineStart;
+ var startIndex = state.position;
+
ch = state.input.charCodeAt(state.position);
if (ch !== 0x22/* " */) {
@@ -1540,6 +1596,25 @@ function readDoubleQuotedScalar(state, nodeIndent) {
if (ch === 0x22/* " */) {
captureSegment(state, captureStart, state.position, true);
state.position++;
+
+ var endLine = state.line;
+ var endColumn = state.position - state.lineStart;
+ var endIndex = state.position;
+
+ var positions = {
+ "start-line": startLine,
+ "start-column": startColumn,
+ "start-index": startIndex,
+ "end-line": endLine,
+ "end-column": endColumn,
+ "end-index": endIndex
+ };
+
+ state.result = {
+ "amf-lexical-token": state.result,
+ "__location__": positions
+ };
+
return true;
} else if (ch === 0x5C/* \ */) {
@@ -2379,13 +2454,19 @@ function composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact
}
}
} else if (_hasOwnProperty.call(state.typeMap[state.kind || 'fallback'], state.tag)) {
+ var isLexicaltoken = false;
+ if (state.result && state.result["amf-lexical-token"]) {
+ state.result = state.result["amf-lexical-token"];
+ isLexicaltoken = true;
+ }
type = state.typeMap[state.kind || 'fallback'][state.tag];
- if (state.result !== null && type.kind !== state.kind) {
+ if (!isLexicaltoken && state.result !== null && type.kind !== state.kind) {
throwError(state, 'unacceptable node kind for !<' + state.tag + '> tag; it should be "' + type.kind + '", not "' + state.kind + '"');
}
if (!type.resolve(state.result)) { // `state.result` updated in resolver if matched
+
throwError(state, 'cannot resolve a node with !<' + state.tag + '> explicit tag');
} else {
state.result = type.construct(state.result);
@@ -4115,6 +4196,10 @@ var collectLibraries = function (fragment, location) {
var libraries = fragment.uses || {};
for (var p in libraries) {
if (p !== "__location__") {
+ var value = libraries[p];
+ if (typeof(value) === "object" && value["amf-lexical-token"]) {
+ value = value["amf-lexical-token"];
+ }
var resolvedLocation = resolvePath(location, libraries[p]);
PENDING_LIBRARIES.push({
"path": libraries[p],
@@ -5965,4 +6050,4 @@ process.chdir = function (dir) {
process.umask = function() { return 0; };
},{}]},{},[32])(32)
-});
\ No newline at end of file
+});
diff --git a/js/js-yaml/lib/js-yaml/loader.js b/js/js-yaml/lib/js-yaml/loader.js
index 4789ec6..166031e 100644
--- a/js/js-yaml/lib/js-yaml/loader.js
+++ b/js/js-yaml/lib/js-yaml/loader.js
@@ -285,7 +285,7 @@ function mergeMappings(state, destination, source, overridableKeys) {
function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode) {
var index, quantity;
- keyNode = String(keyNode);
+ keyNode = String(keyNode["amf-lexical-token"]);
if (_result === null) {
_result = {};
@@ -390,7 +390,6 @@ function testDocumentSeparator(state) {
return true;
}
}
-
return false;
}
@@ -416,6 +415,10 @@ function readPlainScalar(state, nodeIndent, withinFlowCollection) {
_result = state.result,
ch;
+ var startLine = state.line;
+ var startColumn = state.lineStart;
+ var startIndex = state.position;
+
ch = state.input.charCodeAt(state.position);
if (is_WS_OR_EOL(ch) ||
@@ -504,11 +507,35 @@ function readPlainScalar(state, nodeIndent, withinFlowCollection) {
captureSegment(state, captureStart, captureEnd, false);
if (state.result) {
+ var endLine = state.line;
+ var endColumn = state.position - state.lineStart;
+ var endIndex = state.position;
+
+ var positions = {
+ "start-line": startLine,
+ "start-column": startColumn,
+ "start-index": startIndex,
+ "end-line": endLine,
+ "end-column": endColumn,
+ "end-index": endIndex
+ };
+ state.kind = _kind;
+ if (state.result === 'null') {
+ state.result = null;
+ }
+ if (state.result === 'true') {
+ state.result = true;
+ }
+ if (state.result === 'false') {
+ state.result = false;
+ }
+ state.result = {
+ "amf-lexical-token": state.result,
+ "__location__": positions
+ };
+
return true;
}
-
- state.kind = _kind;
- state.result = _result;
return false;
}
@@ -516,6 +543,10 @@ function readSingleQuotedScalar(state, nodeIndent) {
var ch,
captureStart, captureEnd;
+ var startLine = state.line;
+ var startColumn = state.lineStart;
+ var startIndex = state.position;
+
ch = state.input.charCodeAt(state.position);
if (ch !== 0x27/* ' */) {
@@ -537,6 +568,24 @@ function readSingleQuotedScalar(state, nodeIndent) {
state.position++;
captureEnd = state.position;
} else {
+
+ var endLine = state.line;
+ var endColumn = state.position - state.lineStart;
+ var endIndex = state.position;
+
+ var positions = {
+ "start-line": startLine,
+ "start-column": startColumn,
+ "start-index": startIndex,
+ "end-line": endLine,
+ "end-column": endColumn,
+ "end-index": endIndex
+ };
+
+ state.result = {
+ "amf-lexical-token": state.result,
+ "__location__": positions
+ };
return true;
}
@@ -565,6 +614,10 @@ function readDoubleQuotedScalar(state, nodeIndent) {
tmp,
ch;
+ var startLine = state.line;
+ var startColumn = state.lineStart;
+ var startIndex = state.position;
+
ch = state.input.charCodeAt(state.position);
if (ch !== 0x22/* " */) {
@@ -580,6 +633,25 @@ function readDoubleQuotedScalar(state, nodeIndent) {
if (ch === 0x22/* " */) {
captureSegment(state, captureStart, state.position, true);
state.position++;
+
+ var endLine = state.line;
+ var endColumn = state.position - state.lineStart;
+ var endIndex = state.position;
+
+ var positions = {
+ "start-line": startLine,
+ "start-column": startColumn,
+ "start-index": startIndex,
+ "end-line": endLine,
+ "end-column": endColumn,
+ "end-index": endIndex
+ };
+
+ state.result = {
+ "amf-lexical-token": state.result,
+ "__location__": positions
+ };
+
return true;
} else if (ch === 0x5C/* \ */) {
@@ -753,6 +825,10 @@ function readBlockScalar(state, nodeIndent) {
tmp,
ch;
+ var startLine = state.line;
+ var startColumn = state.lineStart;
+ var startIndex = state.position;
+
ch = state.input.charCodeAt(state.position);
if (ch === 0x7C/* | */) {
@@ -881,6 +957,24 @@ function readBlockScalar(state, nodeIndent) {
captureSegment(state, captureStart, state.position, false);
}
+ var endLine = state.line;
+ var endColumn = state.position - state.lineStart;
+ var endIndex = state.position;
+
+ var positions = {
+ "start-line": startLine,
+ "start-column": startColumn,
+ "start-index": startIndex,
+ "end-line": endLine,
+ "end-column": endColumn,
+ "end-index": endIndex
+ };
+
+ state.result = {
+ "amf-lexical-token": state.result,
+ "__location__": positions
+ };
+
return true;
}
@@ -1419,9 +1513,16 @@ function composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact
}
}
} else if (_hasOwnProperty.call(state.typeMap[state.kind || 'fallback'], state.tag)) {
+ var isLexicaltoken = false;
+ var lexicalLocation = null;
+ if (state.result && state.result["amf-lexical-token"]) {
+ lexicalLocation = state.result["__location__"];
+ state.result = state.result["amf-lexical-token"];
+ isLexicaltoken = true;
+ }
type = state.typeMap[state.kind || 'fallback'][state.tag];
- if (state.result !== null && type.kind !== state.kind) {
+ if (!isLexicaltoken && state.result !== null && type.kind !== state.kind) {
throwError(state, 'unacceptable node kind for !<' + state.tag + '> tag; it should be "' + type.kind + '", not "' + state.kind + '"');
}
@@ -1429,6 +1530,9 @@ function composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact
throwError(state, 'cannot resolve a node with !<' + state.tag + '> explicit tag');
} else {
state.result = type.construct(state.result);
+ if (lexicalLocation != null) {
+ state.result["__location__"] = lexicalLocation;
+ }
if (state.anchor !== null) {
state.anchorMap[state.anchor] = state.result;
}
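Taken together, the loader hunks above make every scalar reader (plain, single-quoted, double-quoted and block) return its value wrapped in an object that also records where the scalar was read from, and composeNode re-attaches that location after explicit-tag construction. A sketch of the resulting node shape, written as the Clojure data the parser sees after conversion (key names are taken from the diff; the exact keywordization of the position keys and the sample positions are assumptions):

;; YAML input:  title: World Music API
;; Parsed value (positions illustrative):
{:title {:amf-lexical-token "World Music API"
         :__location__ {:start-line 1 :start-column 0 :start-index 7
                        :end-line 1 :end-column 22 :end-index 29}}}
;; Mapping keys themselves are unwrapped by the storeMappingPair change,
;; so only values carry the wrapper.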
diff --git a/js/yaml.js b/js/yaml.js
index de5d7e7..04f5669 100644
--- a/js/yaml.js
+++ b/js/yaml.js
@@ -150,7 +150,11 @@ var collectLibraries = function (fragment, location) {
var libraries = fragment.uses || {};
for (var p in libraries) {
if (p !== "__location__") {
- var resolvedLocation = resolvePath(location, libraries[p]);
+ var value = libraries[p];
+ if (typeof(value) === "object" && value["amf-lexical-token"]) {
+ value = value["amf-lexical-token"];
+ }
+ var resolvedLocation = resolvePath(location, value);
PENDING_LIBRARIES.push({
"path": libraries[p],
"location": resolvedLocation,
diff --git a/src/api_modelling_framework/core.cljc b/src/api_modelling_framework/core.cljc
index ecf04cb..d8d7839 100644
--- a/src/api_modelling_framework/core.cljc
+++ b/src/api_modelling_framework/core.cljc
@@ -98,7 +98,7 @@
(cb (platform/<-clj res) nil)
(try (cb nil (to-model (raml-document-parser/parse-ast res {})))
(catch #?(:clj Exception :cljs js/Error) ex
- (cb (platform/<-clj ex) nil)))))))
+ (cb (platform/<-clj ex) nil)))))))
(parse-string [this uri string cb] (parse-string this uri string {} cb))
(parse-string [this uri string options cb]
(go (let [res (<! (yaml-parser/parse-string uri string options))]
diff --git a/src/api_modelling_framework/parser/document/raml.cljc b/src/api_modelling_framework/parser/document/raml.cljc
index 2100a92..4376449 100644
--- a/src/api_modelling_framework/parser/document/raml.cljc
+++ b/src/api_modelling_framework/parser/document/raml.cljc
@@ -70,7 +70,6 @@
(defmethod parse-ast "#%RAML 1.0" [node context]
(let [location (syntax/<-location node)
context (assoc context :base-uri location)
- _ (debug "Parsing RAML Document at " location)
fragments (or (:fragments context) (atom {}))
compute-fragments (make-compute-fragments fragments)
;; library declarations are needed to parse the model encoded into the RAML file but it will not be stored
diff --git a/src/api_modelling_framework/parser/domain/common.cljc b/src/api_modelling_framework/parser/domain/common.cljc
index 1ebae52..3015b46 100644
--- a/src/api_modelling_framework/parser/domain/common.cljc
+++ b/src/api_modelling_framework/parser/domain/common.cljc
@@ -30,3 +30,39 @@
(document/find-tag document/is-annotation-tag)
first
some?))
+
+(defn wrapped-ast-token? [node]
+ (not= :amf-not-found (:amf-lexical-token node :amf-not-found)))
+
+;; would have been nice to use a macro here, but clojurescript doesn't work
+;; nicely with them
+(defn with-ast-parsing [ast-node f]
+ (if-let [actual-node (and (map? ast-node)
+ (:amf-lexical-token ast-node))]
+ (with-location-meta-from ast-node (f actual-node))
+ (with-location-meta-from ast-node (f ast-node))))
+
+(defn ast-value [node]
+ (if (wrapped-ast-token? node)
+ (:amf-lexical-token node)
+ node))
+
+(defn ast-assoc
+ ([m k v]
+ (assoc m k (ast-value v))))
+
+(defn ast-get
+ ([m k]
+ (ast-value (get m k)))
+ ([m k default] (ast-value (get m k default))))
+
+
+(defn purge-ast [x]
+ (cond
+ (and (map? x)
+ (wrapped-ast-token? x)) (ast-value x)
+ (map? x) (->> x
+ (mapv (fn [[k v]] [k (purge-ast (ast-value v))]))
+ (into {}))
+ (coll? x) (mapv purge-ast x)
+ :else (ast-value x)))
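purge-ast, added above, recursively strips the wrappers out of an already-parsed structure; the integration and TCK test changes further below rely on it when re-parsing generated YAML. A small sketch with illustrative data:

(common/purge-ast
 {:title     {:amf-lexical-token "World Music API" :__location__ {:start-line 1}}
  :protocols [{:amf-lexical-token "http"  :__location__ {:start-line 2}}
              {:amf-lexical-token "https" :__location__ {:start-line 2}}]})
;; => {:title "World Music API", :protocols ["http" "https"]}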
diff --git a/src/api_modelling_framework/parser/domain/raml.cljc b/src/api_modelling_framework/parser/domain/raml.cljc
index 5bcf1e3..efe6512 100644
--- a/src/api_modelling_framework/parser/domain/raml.cljc
+++ b/src/api_modelling_framework/parser/domain/raml.cljc
@@ -73,13 +73,20 @@
+(defn clean-ast-tokens [x]
+ (reduce (fn [x p]
+ (assoc x p (common/ast-value (get x p))))
+ x
+ (keys x)))
+
(defn extract-scalar
"Function used to unwrap an scalar value if it defined using the value syntax"
[x]
- (cond
- (and (map? x) (:value x)) (:value x)
- (coll? x) (mapv extract-scalar x)
- :else x))
+ (let [x (common/ast-value x)]
+ (cond
+ (and (map? x) (:value x)) (common/ast-value (:value x))
+ (coll? x) (mapv extract-scalar x)
+ :else x)))
(defn guess-type-from-predicates
"If the AST node is a string it can be one of these nodes: resource path, responses tatus or a media type, this function check for these occurrences.
@@ -114,6 +121,7 @@
"If a type hint is available, we use that information to dispatch, otherwise we try to guess from properties"
[node context]
(cond
+ (common/wrapped-ast-token? node) :wrapped-ast-token
(some? (syntax/<-location node)) :fragment
(some? (:type-hint context)) (:type-hint context)
(string? node) (check-reference node context)
@@ -132,11 +140,11 @@
annotation (get annotations annotation-name)]
(if (nil? annotation)
(throw (new #?(:clj Exception :cljs js/Error) (str "Cannot find annotation " p)))
- (do
- (->> (domain/map->ParsedDomainProperty {:id (document/id annotation)
- :name annotation-name
- :object (utils/annotation->jsonld model)})
- (common/with-location-meta-from model))))))
+ (->> (domain/map->ParsedDomainProperty {:id (document/id annotation)
+ :name annotation-name
+ :object (utils/annotation->jsonld (common/purge-ast model))})
+ (clean-ast-tokens)
+ (common/with-location-meta-from model)))))
(defn with-annotations [node ctx model]
(if (map? node)
@@ -163,7 +171,8 @@
(->> parameters
(mapv (fn [[property-name property-value]]
- (let [property-value (if (string? property-value) {:type property-value} property-value)
+ (let [property-value (common/purge-ast property-value)
+ property-value (if (string? property-value) {:type property-value} property-value)
property-type (get property-value :type "string")
property-value (assoc property-value :type property-type)
parsed-location (utils/path-join location location-segment (url/url-encode (utils/safe-str property-name)))
@@ -183,6 +192,7 @@
:shape property-shape}]
(with-annotations property-value context
(->> (domain/map->ParsedParameter properties)
+ (clean-ast-tokens)
(common/with-location-meta-from property-value)))))))))
(defn base-uri->host [base-uri]
@@ -200,7 +210,7 @@
(defn root->scheme [{:keys [protocols baseUri]}]
(let [baseUri (extract-scalar baseUri)]
(cond
- (some? protocols) (->> [protocols] flatten (mapv string/lower-case))
+ (some? protocols) (->> [protocols] flatten (mapv extract-scalar) (mapv string/lower-case))
(and (some? baseUri)
(string/index-of baseUri "://")
(some? (:protocol
@@ -301,7 +311,8 @@
:sources (common/generate-is-annotation-sources annotation-name id parsed-location)
:domain allowed-targets
:range range})
- (common/with-location-meta-from node)))))
+ (clean-ast-tokens)
+ (common/with-location-meta-from annotation-node)))))
{}))))
(defn process-traits [node {:keys [location parsed-location] :as context}]
@@ -344,6 +355,7 @@
(reduce (fn [acc [type-name type-node]]
(debug (str "Processing type " type-name))
(let [location-meta (meta type-node)
+ type-node (common/purge-ast type-node)
type-node (if (syntax/fragment? type-node)
;; avoiding situations where we transform this into an include
;; and then we cannot transform this back into type because there's
@@ -423,6 +435,7 @@
:license nil
:endpoints nested-resources}]
(->> (domain/map->ParsedAPIDocumentation properties)
+ (clean-ast-tokens)
(with-annotations node context)
(common/with-location-meta-from node))))
@@ -434,6 +447,7 @@
(defn parse-traits [resource-id node references {:keys [location parsed-location] :as context}]
(let [traits (flatten [(:is node [])])]
(->> traits
+ (mapv common/ast-value)
(mapv (fn [trait-name]
[trait-name (-> references
(get (keyword trait-name)))]))
@@ -451,6 +465,7 @@
:target (document/id trait)
:label "trait"
:arguments []})
+ (clean-ast-tokens)
(with-annotations trait context)
(common/with-location-meta-from trait)))
(throw (new #?(:clj Exception :cljs js/Error)
@@ -498,6 +513,7 @@
:parameters uri-parameters})]
(concat [(->>
(domain/map->ParsedEndPoint properties)
+ (clean-ast-tokens)
(with-annotations node context)
(common/with-location-meta-from node))]
(or nested-resources []))))
@@ -535,6 +551,7 @@
utils/ensure-not-blank)
:schema schema
:sources node-parsed-source-map}))
+ (clean-ast-tokens)
(with-annotations node context)
(common/with-location-meta-from node))))))))
@@ -557,6 +574,7 @@
:parameters (concat query-parameters query-string)
:headers headers
:payloads payloads})
+ (clean-ast-tokens)
(with-annotations node context)
(common/with-location-meta-from node)))))
@@ -584,6 +602,7 @@
utils/clean-nils)]
(->>
(domain/map->ParsedOperation properties)
+ (clean-ast-tokens)
(with-annotations node context)
(common/with-location-meta-from node))))
@@ -615,6 +634,7 @@
:payloads payloads}]
(->>
(domain/map->ParsedResponse properties)
+ (clean-ast-tokens)
(with-annotations node context)
(common/with-location-meta-from node))))
@@ -649,7 +669,8 @@
(defmethod parse-ast :type [node {:keys [location parsed-location is-fragment references] :as context}]
(debug "Parsing type")
- (let [;; the node can be the string of a type reference if that's the case,
+ (let [node (common/purge-ast node)
+ ;; the node can be the string of a type reference if that's the case,
;; we build a {:type TypeReference} node to process it
node (if (and
(not (shapes/inline-json-schema? node))
@@ -668,6 +689,7 @@
;; They have the same ID
(->> (domain/map->ParsedType {:id type-id
:shape shape})
+ (clean-ast-tokens)
(common/with-location-meta-from node))))
(defmethod parse-ast :fragment [node {:keys [location parsed-location is-fragment fragments type-hint document-parser]
@@ -688,32 +710,62 @@
(assoc :sources nil))
encoded-element)
parsed-location (utils/path-join parsed-location "/includes")
- extends (document/map->ParsedExtends {:id parsed-location
- :sources (generate-extend-include-fragment-sources parsed-location fragment-location)
- :target fragment-location
- :label "!includes"
- :arguments []})]
+ extends (->> (document/map->ParsedExtends {:id parsed-location
+ :sources (generate-extend-include-fragment-sources parsed-location fragment-location)
+ :target fragment-location
+ :label "!includes"
+ :arguments []})
+ (clean-ast-tokens))]
(swap! fragments (fn [acc]
(if (some? (get acc fragment-location))
acc
(assoc acc fragment-location (assoc parsed-fragment :encodes clean-encoded-element)))))
(condp = type-hint
:method (with-annotations node context
- (domain/map->ParsedOperation {:id parsed-location
- :method (utils/safe-str (:method encoded-element))
- :sources encoded-element-sources
- :extends [extends]}))
+ (->> (domain/map->ParsedOperation {:id parsed-location
+ :method (utils/safe-str (:method encoded-element))
+ :sources encoded-element-sources
+ :extends [extends]})
+ (clean-ast-tokens)))
:resource (with-annotations node context
- (domain/map->ParsedEndPoint {:id parsed-location
- :path (:path encoded-element)
- :extends [extends]
- :sources encoded-element-sources}))
+ (->> (domain/map->ParsedEndPoint {:id parsed-location
+ :path (:path encoded-element)
+ :extends [extends]
+ :sources encoded-element-sources})
+ (clean-ast-tokens)))
(let [properties {:id parsed-location
:label "!includes"
:target fragment-location}]
(->>
(document/map->ParsedIncludes properties)
+ (clean-ast-tokens)
(with-annotations node context)
(common/with-location-meta-from node)))))))
+;;(defmethod parse-ast :wrapped-ast-token [token context]
+;; (let [value (common/ast-value token)]
+;; (if (map? value)
+;; (let [location-info (->> value
+;; (mapv (fn [p token]
+;; [p (:__location__ token)]))
+;; (into {}))
+;; value (->> value
+;; (mapv (fn [p token]
+;; (let [token-value (common/ast-value token)]
+;; (if (map? token-value)
+;; [p token]
+;; [p token-value]))))
+;; (into {}))]
+;; (parse-ast value (assoc context :location-info location-info)))
+;; (common/with-ast-parsing token
+;; (fn [node] (parse-ast node context))))))
+
+(defmethod parse-ast :wrapped-ast-token [token context]
+ (let [value (common/ast-value token)
+ location (get token :__location__)
+ result (parse-ast value context)]
+ (if (map? result)
+ (assoc result :location location)
+ result)))
+
(defmethod parse-ast :undefined [_ _] nil)
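Two patterns recur in the raml.cljc hunks above: clean-ast-tokens unwraps only the top-level values of a parsed record before it is returned, while the new :wrapped-ast-token method of parse-ast unwraps a token, parses the inner value and, when the result is a map, stores the token's :__location__ under :location. A sketch of the shallow helper with illustrative data:

(clean-ast-tokens
 {:name        {:amf-lexical-token "get" :__location__ {:start-line 4}}
  :description "a plain, already-unwrapped string"})
;; => {:name "get", :description "a plain, already-unwrapped string"}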
diff --git a/src/api_modelling_framework/parser/domain/raml_types_shapes.cljc b/src/api_modelling_framework/parser/domain/raml_types_shapes.cljc
index 94aec75..8ec0248 100644
--- a/src/api_modelling_framework/parser/domain/raml_types_shapes.cljc
+++ b/src/api_modelling_framework/parser/domain/raml_types_shapes.cljc
@@ -112,7 +112,7 @@
(defn parse-shape [node {:keys [parsed-location] :as context}]
(let [properties (->> (:properties node [])
(mapv (fn [[k v]]
- (let [property-name (utils/safe-str k)
+ (let [property-name (utils/safe-str k)
property-name (final-property-name property-name v)
parsed-location (utils/path-join parsed-location (str "/property/" property-name))
parsed-property-target (parse-type v (assoc context :parsed-location parsed-location))
@@ -216,7 +216,6 @@
(defn check-reference
"Checks if a provided string points to one of the types defined at the APIDocumentation level"
[type-string {:keys [references parsed-location base-uri] :as context}]
-
(if-let [type-reference (utils/type-reference? type-string references)]
(let [label (or (-> type-reference :name)
(if (satisfies? domain/Type type-reference) (-> type-reference domain/shape :name) nil)
diff --git a/src/api_modelling_framework/parser/syntax/yaml.cljc b/src/api_modelling_framework/parser/syntax/yaml.cljc
index c84bd9c..ad1fb02 100644
--- a/src/api_modelling_framework/parser/syntax/yaml.cljc
+++ b/src/api_modelling_framework/parser/syntax/yaml.cljc
@@ -63,7 +63,7 @@
#?(:cljs (defn parse-string
([uri string options]
- (let [ch (chan)]
+ (let [ch (chan)]
(JS_YAML/parseYamlString uri string (clj->js options) (fn [e result]
(go (try (if e
(>! ch (ex-info (str e) e))
diff --git a/test/api_modelling_framework/integration_test.cljc b/test/api_modelling_framework/integration_test.cljc
index 32d455c..9253b6c 100644
--- a/test/api_modelling_framework/integration_test.cljc
+++ b/test/api_modelling_framework/integration_test.cljc
@@ -10,6 +10,7 @@
[api-modelling-framework.parser.syntax.yaml :as yaml-parser]
[api-modelling-framework.utils :as utils]
[api-modelling-framework.utils-test :refer [cb->chan error?]]
+ [api-modelling-framework.parser.domain.common :refer [purge-ast]]
[clojure.string :as string]
#?(:cljs [cljs.core.async :refer [<! >! chan]])
#?(:clj [api-modelling-framework.platform :refer [async]])
@@ -17,6 +18,7 @@
#?(:clj [clojure.test :refer [deftest is]])))
+
(deftest integration-test-raml->open-api
(async done
(go (let [parser (core/->RAMLParser)
@@ -69,7 +71,7 @@
output-model
{})))
_ (is (not (error? output-string)))
- output (syntax/<-data (<! (yaml-parser/parse-string "resources/world-music-api/wip.raml" output-string)))]
+ output (purge-ast (syntax/<-data (<! (yaml-parser/parse-string "resources/world-music-api/wip.raml" output-string))))]
(is (= [:Album :Track] (-> output :types keys)))
(is (= "SongsLib.Song" (-> output :types :Track :properties :song)))
@@ -113,7 +115,7 @@
output-model
{})))
_ (is (not (error? output-string)))
- output (syntax/<-data (<! (yaml-parser/parse-string "resources/world-music-api/wip.raml" output-string)))
+ output (purge-ast (syntax/<-data (<! (yaml-parser/parse-string "resources/world-music-api/wip.raml" output-string))))
types (->> output
(filter (fn [[k v]] (string/starts-with? (utils/safe-str k) "/")))
(map last)
@@ -141,6 +143,9 @@
(into {}))
api-resource (get paths "/api")
song-resource (get paths "/songs/{songId}")]
+ #?(:cljs (prn (map (fn [parsed]
+ (is (some? (:lexical parsed))))
+ (document/declares document-model))))
(doseq [ref (document/references (core/document-model model))]
(is (some? (:raw ref))))
(is (= "World Music API" (document/name api-documentation)))
@@ -210,7 +215,7 @@
(go (let [output-document-raml (<! (cb->chan (partial core/generate-string generator-raml "resources/world-music-api/wip.raml"
output-document-model
{})))
- raw (<! (yaml-parser/parse-string "resources/world-music-api/wip.raml" output-document-raml))
+ raw (purge-ast (<! (yaml-parser/parse-string "resources/world-music-api/wip.raml" output-document-raml)))
;; JS / JAVA parsers behave in a slightly different way, that's the reason for the or
parsed-document-raml-output (or (syntax/<-data raw)
(:data (get raw (keyword "resources/world-music-api/wip.raml"))))]
@@ -227,7 +232,7 @@
(go (let [output-domain-raml (<! (cb->chan (partial core/generate-string generator-raml "resources/world-music-api/wip.raml"
output-domain-model
{})))
- raw (<! (yaml-parser/parse-string "resources/world-music-api/wip.raml" output-domain-raml))
+ raw (purge-ast (<! (yaml-parser/parse-string "resources/world-music-api/wip.raml" output-domain-raml)))
;; JS / JAVA parsers behave in a slightly different way, that's the reason for the or
parsed-domain-raml-output (or
;; java
@@ -324,7 +329,7 @@
output-raml (<! (cb->chan (partial core/generate-string generator-raml "resources/petstore.raml"
output-model
{})))
- yaml-data (syntax/<-data (<! (yaml-parser/parse-string "resources/petstore.raml" output-raml)))
+ yaml-data (purge-ast (syntax/<-data (<! (yaml-parser/parse-string "resources/petstore.raml" output-raml))))
output-jsonld (<! (cb->chan (partial core/generate-string generator-jsonld "resources/petstore.jsonld"
output-model
{})))]
@@ -411,7 +416,7 @@
output-yaml(<! (cb->chan (partial core/generate-string generator "resources/api.raml"
(core/document-model read-model)
{})))
- yaml-data (syntax/<-data (<! (yaml-parser/parse-string "resources/world-music-api/wip.raml" output-yaml)))]
+ yaml-data (purge-ast (syntax/<-data (<! (yaml-parser/parse-string "resources/world-music-api/wip.raml" output-yaml))))]
(is (some? (-> yaml-data :types :User)))
(is (= (-> yaml-data (get (keyword "/users")) :get :responses :200 :body) "User"))
(done)))))
@@ -432,7 +437,7 @@
output-yaml(<! (cb->chan (partial core/generate-string generator "resources/api.raml"
(core/document-model read-model)
{})))
- yaml-data (syntax/<-data (<! (yaml-parser/parse-string "resources/world-music-api/wip.raml" output-yaml)))]
+ yaml-data (purge-ast (syntax/<-data (<! (yaml-parser/parse-string "resources/world-music-api/wip.raml" output-yaml))))]
(is (some? (-> yaml-data :types :User)))
(is (= (-> yaml-data (get (keyword "/users")) :get :responses :200 :body) "User"))
(done)))))
@@ -449,7 +454,7 @@
output-yaml(<! (cb->chan (partial core/generate-string generator "resources/api.raml"
output-model
{})))
- yaml-data (syntax/<-data (<! (yaml-parser/parse-string "resources/tck/raml-1.0/Fragments/test001/fragment.raml" output-yaml)))
+ yaml-data (purge-ast (syntax/<-data (<! (yaml-parser/parse-string "resources/tck/raml-1.0/Fragments/test001/fragment.raml" output-yaml))))
output-jsonld (<! (cb->chan (partial core/generate-string jsonld-generator "resources/api.raml"
output-model
{:source-maps? false})))
@@ -495,7 +500,7 @@
output-raml (<! (cb->chan (partial core/generate-string generator-raml "resources/uber.raml"
output-model
{})))
- yaml-data (syntax/<-data (<! (yaml-parser/parse-string "resources/petstore.raml" output-raml)))
+ yaml-data (purge-ast (syntax/<-data (<! (yaml-parser/parse-string "resources/petstore.raml" output-raml))))
output-jsonld (<! (cb->chan (partial core/generate-string generator-jsonld "resources/uber.jsonld"
output-model
{})))]
diff --git a/test/api_modelling_framework/runner.cljs b/test/api_modelling_framework/runner.cljs
index 17dd544..961a45f 100644
--- a/test/api_modelling_framework/runner.cljs
+++ b/test/api_modelling_framework/runner.cljs
@@ -17,6 +17,7 @@
[api-modelling-framework.generators.document.openapi-test]
[api-modelling-framework.model.document-test]
[api-modelling-framework.core-test]
+ [api-modelling-framework.parser.domain.common-test]
))
(doo-tests 'api-modelling-framework.tck
@@ -36,4 +37,5 @@
'api-modelling-framework.generators.document.raml-test
'api-modelling-framework.generators.document.openapi-test
'api-modelling-framework.core-test
+ 'api-modelling-framework.parser.domain.common-test
)
diff --git a/test/api_modelling_framework/tck.cljc b/test/api_modelling_framework/tck.cljc
index aa03007..e01482d 100644
--- a/test/api_modelling_framework/tck.cljc
+++ b/test/api_modelling_framework/tck.cljc
@@ -10,6 +10,7 @@
[api-modelling-framework.parser.syntax.yaml :as yaml-parser]
[api-modelling-framework.utils :as utils]
[api-modelling-framework.utils-test :refer [cb->chan error?]]
+ [api-modelling-framework.parser.domain.common :refer [purge-ast]]
#?(:clj [com.georgejahad.difform :as difform])
[clojure.string :as string]
#?(:cljs [cljs.nodejs :as nodejs])
@@ -208,12 +209,15 @@
(defn to-data-structure [uri type s]
(go (condp = type
- :raml (->> (yaml-parser/parse-string uri (-> s
- (string/replace ".openapi" ".raml")
- (string/replace ".jsonld" ".raml")))
- <!
- -success->
- clean-fragments)
+ :raml
+ (->> (yaml-parser/parse-string uri (-> s
+ (string/replace ".openapi" ".raml")
+ (string/replace ".jsonld" ".raml")))
+ <!
+ -success->
+ ;;prn
+ purge-ast
+ clean-fragments)
:openapi (platform/decode-json s)
:jsonld (platform/decode-json s))))
@@ -255,7 +259,7 @@
{:source-maps? false
:full-graph? false}))))
doc-generated (<! (to-data-structure generated-file-name type raw-generated-data))]
- ;;(println generated-jsonld)
+ (prn raw-generated-data)
(is (same-structure? (ensure-not-nil (clean-ids (platform/decode-json generated-jsonld)))
(ensure-not-nil (clean-ids target))))
(is (same-structure? (ensure-not-nil (clean-ids doc-generated))
@@ -265,7 +269,6 @@
(go (doseq [[from to] ;[[:openapi :raml]]
conversions
]
- (println "\n\nCOMPARING " from " -> " to "\n\n")
(let [source (get files from)
target (get files to)
target (->> (target-file files to from)
here the second argument should be `value` (the js-support-bundle.js `collectLibraries` hunk computes `value` but still passes `libraries[p]` to `resolvePath`, unlike the matching change in js/yaml.js)