Merge branch 'main' into feat/fuzzing

This commit is contained in:
InversionSpaces 2023-09-19 11:14:11 +00:00
commit 2af3e0c012
376 changed files with 17303 additions and 60962 deletions

134
.github/e2e/docker-compose.yml vendored Normal file
View File

@ -0,0 +1,134 @@
networks:
nox:
driver: bridge
ipam:
config:
- subnet: 10.50.10.0/24
services:
nox-1:
image: ${NOX_IMAGE}
ports:
- 7771:7771
- 9991:9991
command:
- --aqua-pool-size=2
- -t=7771
- -w=9991
- -x=10.50.10.10
- --external-maddrs
- /dns4/nox-1/tcp/7771
- /dns4/nox-1/tcp/9991/ws
- --allow-private-ips
- --local
# - --bootstraps=/dns/nox-1/tcp/7771
# 12D3KooWBM3SdXWqGaawQDGQ6JprtwswEg3FWGvGhmgmMez1vRbR
- -k=hK62afickoeP2uZbmSkAYXxxqP8ozq16VRN7qfTP719EHC5V5tjrtW57BSjUr8GvsEXmJRbtejUWyPZ2rZMyQdq
networks:
nox:
ipv4_address: 10.50.10.10
nox-2:
image: ${NOX_IMAGE}
ports:
- 7772:7772
- 9992:9992
command:
- --aqua-pool-size=2
- -t=7772
- -w=9992
- -x=10.50.10.20
- --external-maddrs
- /dns4/nox-2/tcp/7772
- /dns4/nox-2/tcp/9992/ws
- --allow-private-ips
- --bootstraps=/dns/nox-1/tcp/7771
# 12D3KooWQdpukY3p2DhDfUfDgphAqsGu5ZUrmQ4mcHSGrRag6gQK
- -k=2WijTVdhVRzyZamWjqPx4V4iNMrajegNMwNa2PmvPSZV6RRpo5M2fsPWdQr22HVRubuJhhSw8BrWiGt6FPhFAuXy
networks:
nox:
ipv4_address: 10.50.10.20
nox-3:
image: ${NOX_IMAGE}
ports:
- 7773:7773
- 9993:9993
command:
- --aqua-pool-size=2
- -t=7773
- -w=9993
- -x=10.50.10.30
- --external-maddrs
- /dns4/nox-3/tcp/7773
- /dns4/nox-3/tcp/9993/ws
- --allow-private-ips
- --bootstraps=/dns/nox-1/tcp/7771
# 12D3KooWRT8V5awYdEZm6aAV9HWweCEbhWd7df4wehqHZXAB7yMZ
- -k=2n2wBVanBeu2GWtvKBdrYK9DJAocgG3PrTUXMharq6TTfxqTL4sLdXL9BF23n6rsnkAY5pR9vBtx2uWYDQAiZdrX
networks:
nox:
ipv4_address: 10.50.10.30
nox-4:
image: ${NOX_IMAGE}
ports:
- 7774:7774
- 9994:9994
command:
- --aqua-pool-size=2
- -t=7774
- -w=9994
- -x=10.50.10.40
- --external-maddrs
- /dns4/nox-4/tcp/7774
- /dns4/nox-4/tcp/9994/ws
- --allow-private-ips
- --bootstraps=/dns/nox-1/tcp/7771
# 12D3KooWBzLSu9RL7wLP6oUowzCbkCj2AGBSXkHSJKuq4wwTfwof
- -k=4zp8ucAikkjB8CmkufYiFBW4QCDUCbQG7yMjviX7W8bMyN5rfChQ2Pi5QCWThrCTbAm9uq5nbFbxtFcNZq3De4dX
networks:
nox:
ipv4_address: 10.50.10.40
nox-5:
image: ${NOX_IMAGE}
ports:
- 7775:7775
- 9995:9995
command:
- --aqua-pool-size=2
- -t=7775
- -w=9995
- -x=10.50.10.50
- --external-maddrs
- /dns4/nox-5/tcp/7775
- /dns4/nox-5/tcp/9995/ws
- --allow-private-ips
- --bootstraps=/dns/nox-1/tcp/7771
# 12D3KooWBf6hFgrnXwHkBnwPGMysP3b1NJe5HGtAWPYfwmQ2MBiU
- -k=3ry26rm5gkJXvdqRH4FoM3ezWq4xVVsBQF7wtKq4E4pbuaa6p1F84tNqifUS7DdfJL9hs2gcdW64Wc342vHZHMUp
networks:
nox:
ipv4_address: 10.50.10.50
nox-6:
image: ${NOX_IMAGE}
ports:
- 7776:7776
- 9996:9996
command:
- --aqua-pool-size=2
- -t=7776
- -w=9996
- --bootstraps=/dns/nox-1/tcp/7771
- -x=10.50.10.60
- --external-maddrs
- /dns4/nox-6/tcp/7776
- /dns4/nox-6/tcp/9996/ws
- --allow-private-ips
# 12D3KooWPisGn7JhooWhggndz25WM7vQ2JmA121EV8jUDQ5xMovJ
- -k=5Qh8bB1sF28uLPwr3HTvEksCeC6mAWQvebCfcgv9y6j4qKwSzNKm2tzLUg4nACUEo2KZpBw11gNCnwaAdM7o1pEn
networks:
nox:
ipv4_address: 10.50.10.60

View File

@ -1,3 +1,3 @@
{
".": "0.11.7"
".": "0.12.1"
}

View File

@ -1,65 +0,0 @@
name: "Publish binary aqua"
on:
workflow_dispatch:
inputs:
runs-on:
type: string
required: true
arch:
type: string
required: true
os:
type: string
required: true
static:
type: boolean
required: true
workflow_call:
inputs:
runs-on:
type: string
required: true
arch:
type: string
required: true
os:
type: string
required: true
static:
type: boolean
required: true
jobs:
build:
name: "Publish aqua-native"
runs-on: ${{ inputs.runs-on }}
timeout-minutes: 10
steps:
- name: Checkout
uses: actions/checkout@v3
- uses: graalvm/setup-graalvm@v1
with:
version: '22.3.1'
java-version: '17'
set-java-home: true
components: 'native-image'
github-token: ${{ secrets.GITHUB_TOKEN }}
- uses: coursier/cache-action@v6
- uses: coursier/setup-action@v1
with:
apps: sbt
- name: build
run: sbt "cli/GraalVMNativeImage/packageBin"
env:
COMPILE_STATIC: ${{ inputs.static }}
- uses: actions/upload-artifact@v3
with:
name: aqua-${{ inputs.os }}-${{ inputs.arch }}
path: cli/cli/.jvm/target/graalvm-native-image/cli

View File

@ -27,6 +27,7 @@ concurrency:
jobs:
aqua:
name: "aqua"
if: >
github.event_name == 'push' ||
contains(github.event.pull_request.labels.*.name, 'e2e')
@ -34,38 +35,22 @@ jobs:
with:
ref: ${{ github.ref }}
aqua-playground:
flox-snapshot:
name: "flox"
needs: aqua
uses: fluencelabs/aqua-playground/.github/workflows/tests.yml@master
uses: fluencelabs/flox/.github/workflows/snapshot.yml@main
with:
aqua-version: "${{ needs.aqua.outputs.aqua-version }}"
aqua-snapshots: "${{ needs.aqua.outputs.aqua-snapshots }}"
registry:
needs: aqua
uses: fluencelabs/registry/.github/workflows/tests.yml@main
with:
aqua-version: "${{ needs.aqua.outputs.aqua-version }}"
fluence-cli:
flox:
needs: aqua
uses: fluencelabs/fluence-cli/.github/workflows/tests.yml@main
with:
aqua-api-version: "${{ needs.aqua.outputs.aqua-api-version }}"
aqua-snapshots: "${{ needs.aqua.outputs.aqua-snapshots }}"
aqua-native:
name: "aqua"
strategy:
matrix:
runner:
- runs-on: ubuntu-latest
arch: amd64
os: linux
static: true
registry:
needs:
- aqua
uses: ./.github/workflows/binary.yml
- flox-snapshot
uses: fluencelabs/registry/.github/workflows/tests.yml@main
with:
runs-on: ${{ matrix.runner.runs-on }}
arch: ${{ matrix.runner.arch }}
os: ${{ matrix.runner.os }}
static: ${{ matrix.runner.static }}
flox-version: "${{ needs.flox-snapshot.outputs.version }}"

View File

@ -24,7 +24,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Lint actions
uses: reviewdog/action-actionlint@v1

View File

@ -18,13 +18,17 @@ env:
FORCE_COLOR: true
jobs:
compile:
publish:
runs-on: builder
timeout-minutes: 60
permissions:
contents: read
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
ref: ${{ inputs.tag }}
@ -36,60 +40,11 @@ jobs:
with:
apps: sbt
- name: JS CLI build
run: sbt cliJS/fullLinkJS
- name: JS API build
run: sbt aqua-apiJS/fullLinkJS
- name: JS LSP build
run: sbt language-server-apiJS/fullLinkJS
- name: Upload aqua-cli artifact
uses: actions/upload-artifact@v3
with:
name: aqua-cli
path: cli/cli/.js/target/scala-*/cli-opt/main.js
- name: Upload aqua-api artifact
uses: actions/upload-artifact@v3
with:
name: aqua-api
path: api/aqua-api/.js/target/scala-*/aqua-api-opt/main.js
- name: Upload aqua-lsp artifact
uses: actions/upload-artifact@v3
with:
name: aqua-lsp
path: language-server/language-server-api/.js/target/scala-*/language-server-api-opt/main.js
aqua-cli:
name: "Publish aqua-cli"
runs-on: ubuntu-latest
timeout-minutes: 60
needs:
- compile
permissions:
contents: read
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
ref: ${{ inputs.tag }}
- name: Download aqua-cli artifact
uses: actions/download-artifact@v3
with:
name: aqua-cli
- run: mv scala-*/*/main.js cli/cli-npm/aqua.js
- name: scala-js build
run: sbt ";language-server-apiJS/fullBundleJS;aqua-apiJS/fullBundleJS"
- name: Import secrets
uses: hashicorp/vault-action@v2.5.0
uses: hashicorp/vault-action@v2.7.3
with:
url: https://vault.fluence.dev
path: jwt/github
@ -101,148 +56,20 @@ jobs:
secrets: |
kv/npmjs/fluencebot token | NODE_AUTH_TOKEN
- name: Setup node
- name: Setup pnpm
uses: pnpm/action-setup@v2.4.0
with:
version: 8
- name: Setup node with self-hosted npm registry
uses: actions/setup-node@v3
with:
node-version: "16"
node-version: "18"
registry-url: "https://registry.npmjs.org"
cache-dependency-path: "cli/cli-npm/package-lock.json"
cache: "npm"
cache: "pnpm"
- run: npm i
working-directory: cli/cli-npm
- run: npm run build
working-directory: cli/cli-npm
- run: pnpm --filter='!integration-tests' -r i
- run: pnpm --filter='!integration-tests' -r build
- name: Publish to NPM registry
run: npm publish --access public --tag unstable
working-directory: cli/cli-npm
aqua-api:
name: "Publish aqua-api"
runs-on: ubuntu-latest
timeout-minutes: 60
needs:
- compile
permissions:
contents: read
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
ref: ${{ inputs.tag }}
- name: Download aqua-api artifact
uses: actions/download-artifact@v3
with:
name: aqua-api
- run: mv scala-*/*/main.js api/aqua-api-npm/aqua-api.js
- name: Import secrets
uses: hashicorp/vault-action@v2.5.0
with:
url: https://vault.fluence.dev
path: jwt/github
role: ci
method: jwt
jwtGithubAudience: "https://github.com/fluencelabs"
jwtTtl: 300
exportToken: false
secrets: |
kv/npmjs/fluencebot token | NODE_AUTH_TOKEN
- name: Setup node
uses: actions/setup-node@v3
with:
node-version: "16"
registry-url: "https://registry.npmjs.org"
cache-dependency-path: "api/aqua-api-npm/package-lock.json"
cache: "npm"
- run: npm i
working-directory: api/aqua-api-npm
- name: Publish to NPM registry
run: npm publish --access public --tag unstable
working-directory: api/aqua-api-npm
aqua-lsp:
name: "Publish aqua-lsp"
runs-on: ubuntu-latest
timeout-minutes: 60
needs:
- compile
permissions:
contents: read
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
ref: ${{ inputs.tag }}
- name: Download aqua-lsp artifact
uses: actions/download-artifact@v3
with:
name: aqua-lsp
- run: mv scala-*/*/main.js language-server/language-server-npm/aqua-lsp-api.js
- name: Import secrets
uses: hashicorp/vault-action@v2.5.0
with:
url: https://vault.fluence.dev
path: jwt/github
role: ci
method: jwt
jwtGithubAudience: "https://github.com/fluencelabs"
jwtTtl: 300
exportToken: false
secrets: |
kv/npmjs/fluencebot token | NODE_AUTH_TOKEN
- name: Setup node
uses: actions/setup-node@v3
with:
node-version: "16"
registry-url: "https://registry.npmjs.org"
cache-dependency-path: "language-server/language-server-npm/package-lock.json"
cache: "npm"
- run: npm i
working-directory: language-server/language-server-npm
- name: Publish to NPM registry
run: npm publish --access public --tag unstable
working-directory: language-server/language-server-npm
aqua-native:
name: "Publish aqua-native"
strategy:
matrix:
runner:
- runs-on: ubuntu-latest
arch: amd64
os: linux
static: true
- runs-on: macos-latest
arch: amd64
os: macos
static: false
needs:
- compile
uses: ./.github/workflows/binary.yml
with:
runs-on: ${{ matrix.runner.runs-on }}
arch: ${{ matrix.runner.arch }}
os: ${{ matrix.runner.os }}
static: ${{ matrix.runner.static }}
run: pnpm --filter='!integration-tests' publish --access public --tag unstable

View File

@ -47,9 +47,10 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
ref: ${{ fromJson(needs.release-please.outputs.pr).headBranchName }}
token: ${{ secrets.FLUENCEBOT_RELEASE_PLEASE_PAT }}
- name: Get aqua version
id: version
@ -61,18 +62,34 @@ jobs:
run: |
sed -i 's/aquaVersion =.*/aquaVersion = "${{ steps.version.outputs.version }}"/g' build.sbt
- name: Set cli version
run: npm version ${{ steps.version.outputs.version }}
working-directory: cli/cli-npm
- name: Setup pnpm
uses: pnpm/action-setup@v2.4.0
with:
version: 8
- name: Setup node with self-hosted npm registry
uses: actions/setup-node@v3
with:
node-version: "18"
registry-url: "https://registry.npmjs.org"
cache: "pnpm"
- name: Set api version
run: npm version ${{ steps.version.outputs.version }}
working-directory: api/aqua-api-npm
run: pnpm version ${{ steps.version.outputs.version }}
working-directory: api/api-npm
- name: Set lsp version
run: npm version ${{ steps.version.outputs.version }}
run: pnpm version ${{ steps.version.outputs.version }}
working-directory: language-server/language-server-npm
- name: Set aqua version in tests
run: |
pnpm add @fluencelabs/aqua-api@${{ steps.version.outputs.version }} --save-workspace-protocol=false
working-directory: integration-tests
- name: Regenerate lock
run: pnpm -r i
- name: Commit version bump
uses: stefanzweifel/git-auto-commit-action@v4
with:
@ -116,7 +133,7 @@ jobs:
echo "found any?:" "${{ steps.status.outputs.found }}"
- name: Import secrets
uses: hashicorp/vault-action@v2.5.0
uses: hashicorp/vault-action@v2.7.3
with:
url: https://vault.fluence.dev
path: jwt/github

View File

@ -21,6 +21,12 @@ concurrency:
cancel-in-progress: true
jobs:
sbt-tests:
name: "aqua"
uses: ./.github/workflows/sbt-tests.yml
with:
ref: ${{ github.ref }}
tests:
name: "aqua"
uses: ./.github/workflows/tests.yml

32
.github/workflows/sbt-tests.yml vendored Normal file
View File

@ -0,0 +1,32 @@
name: Run sbt tests with workflow_call
on:
workflow_call:
inputs:
ref:
description: "git ref to checkout to"
type: string
default: "main"
jobs:
tests:
name: "Run sbt tests"
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
repository: fluencelabs/aqua
ref: ${{ inputs.ref }}
- name: Cache Scala
uses: coursier/cache-action@v6
- name: Setup Scala
uses: coursier/setup-action@v1
with:
apps: sbt
- name: Run tests
run: sbt test

View File

@ -3,37 +3,35 @@ name: Publish snapshots
on:
workflow_call:
inputs:
fluence-js-version:
description: "@fluencejs/fluence version"
type: string
default: "null"
ref:
description: "git ref to checkout to"
type: string
default: "main"
outputs:
aqua-version:
description: "@fluencelabs/aqua version"
value: ${{ jobs.aqua-cli.outputs.version }}
aqua-lsp-version:
description: "@fluencelabs/aqua-language-server-api version"
value: ${{ jobs.aqua-lsp.outputs.version }}
aqua-api-version:
description: "@fluencelabs/aqua-api version"
value: ${{ jobs.aqua-api.outputs.version }}
aqua-snapshots:
description: "aqua snapshots"
value: ${{ jobs.publish.outputs.snapshots }}
env:
FORCE_COLOR: true
CI: true
jobs:
compile:
name: "Compile"
publish:
name: "Publish snapshots"
runs-on: builder
timeout-minutes: 60
permissions:
contents: read
id-token: write
outputs:
snapshots: "${{ steps.snapshot.outputs.snapshots }}"
steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: fluencelabs/aqua
ref: ${{ inputs.ref }}
@ -50,69 +48,13 @@ jobs:
with:
apps: sbt
- name: JS CLI build
- name: scala-js build
env:
SNAPSHOT: ${{ steps.version.outputs.id }}
run: sbt cliJS/fastOptJS
- name: JS LSP build
env:
SNAPSHOT: ${{ steps.version.outputs.id }}
run: sbt language-server-apiJS/fastOptJS
- name: JS API build
env:
SNAPSHOT: ${{ steps.version.outputs.id }}
run: sbt aqua-apiJS/fastOptJS
- name: Upload aqua-cli artifact
uses: actions/upload-artifact@v3
with:
name: aqua-cli
path: cli/cli/.js/target/scala-*/cli-fastopt.js
- name: Upload aqua-api artifact
uses: actions/upload-artifact@v3
with:
name: aqua-api
path: api/aqua-api/.js/target/scala-*/aqua-api-fastopt.js
- name: Upload aqua-lsp artifact
uses: actions/upload-artifact@v3
with:
name: aqua-lsp
path: language-server/language-server-api/.js/target/scala-*/language-server-api-fastopt.js
aqua-cli:
name: "Publish aqua-cli"
runs-on: ubuntu-latest
timeout-minutes: 60
needs: compile
outputs:
version: "${{ steps.snapshot.outputs.version }}"
permissions:
contents: read
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
repository: fluencelabs/aqua
ref: ${{ inputs.ref }}
- name: Download aqua-cli artifact
uses: actions/download-artifact@v3
with:
name: aqua-cli
- run: mv scala-*/cli-fastopt.js cli/cli-npm/aqua.js
run: sbt ";language-server-apiJS/fastBundleJS;aqua-apiJS/fastBundleJS"
- name: Import secrets
uses: hashicorp/vault-action@v2.5.0
uses: hashicorp/vault-action@v2.7.3
with:
url: https://vault.fluence.dev
path: jwt/github
@ -124,163 +66,28 @@ jobs:
secrets: |
kv/npm-registry/basicauth/ci token | NODE_AUTH_TOKEN
- name: Setup node with self-hosted npm registry
uses: actions/setup-node@v3
- name: Setup pnpm
uses: pnpm/action-setup@v2.4.0
with:
node-version: "16"
registry-url: "https://npm.fluence.dev"
cache-dependency-path: "cli/cli-npm/package-lock.json"
cache: "npm"
- run: npm i
working-directory: cli/cli-npm
- name: Set fluence-js version from branch
if: inputs.fluence-js-version != 'null'
working-directory: cli/cli-npm
run: npm i --save -E @fluencelabs/fluence@${{ inputs.fluence-js-version }}
- run: npm run build
working-directory: cli/cli-npm
- name: Generate snapshot version
id: version
uses: fluencelabs/github-actions/generate-snapshot-id@main
- name: Publish snapshot
id: snapshot
uses: fluencelabs/github-actions/npm-publish-snapshot@main
with:
working-directory: cli/cli-npm
id: ${{ steps.version.outputs.id }}
aqua-api:
name: "Publish aqua-api"
runs-on: ubuntu-latest
timeout-minutes: 60
needs: compile
outputs:
version: "${{ steps.snapshot.outputs.version }}"
permissions:
contents: read
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
repository: fluencelabs/aqua
ref: ${{ inputs.ref }}
- name: Download aqua-api artifact
uses: actions/download-artifact@v3
with:
name: aqua-api
- run: mv scala-*/aqua-api-fastopt.js api/aqua-api-npm/aqua-api.js
- name: Import secrets
uses: hashicorp/vault-action@v2.5.0
with:
url: https://vault.fluence.dev
path: jwt/github
role: ci
method: jwt
jwtGithubAudience: "https://github.com/fluencelabs"
jwtTtl: 300
exportToken: false
secrets: |
kv/npm-registry/basicauth/ci token | NODE_AUTH_TOKEN
version: 8
- name: Setup node with self-hosted npm registry
uses: actions/setup-node@v3
with:
node-version: "16"
node-version: "18"
registry-url: "https://npm.fluence.dev"
cache-dependency-path: "api/aqua-api-npm/package-lock.json"
cache: "npm"
cache: "pnpm"
- run: npm i
working-directory: api/aqua-api-npm
- run: pnpm -r i
- name: Set fluence-js version from branch
if: inputs.fluence-js-version != 'null'
working-directory: cli/cli-npm
run: npm i --save-dev -E @fluencelabs/fluence@${{ inputs.fluence-js-version }}
- name: Set package version
run: node ci.cjs bump-version ${{ steps.version.outputs.id }}
- name: Generate snapshot version
id: version
uses: fluencelabs/github-actions/generate-snapshot-id@main
- run: pnpm -r build
- name: Publish snapshot
id: snapshot
uses: fluencelabs/github-actions/npm-publish-snapshot@main
uses: fluencelabs/github-actions/pnpm-publish-snapshot@main
with:
working-directory: api/aqua-api-npm
id: ${{ steps.version.outputs.id }}
aqua-lsp:
name: "Publish aqua-lsp"
runs-on: ubuntu-latest
timeout-minutes: 60
needs: compile
outputs:
version: "${{ steps.snapshot.outputs.version }}"
permissions:
contents: read
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
repository: fluencelabs/aqua
ref: ${{ inputs.ref }}
- name: Download aqua-lsp artifact
uses: actions/download-artifact@v3
with:
name: aqua-lsp
- run: mv scala-*/language-server-api-fastopt.js language-server/language-server-npm/aqua-lsp-api.js
- name: Import secrets
uses: hashicorp/vault-action@v2.5.0
with:
url: https://vault.fluence.dev
path: jwt/github
role: ci
method: jwt
jwtGithubAudience: "https://github.com/fluencelabs"
jwtTtl: 300
exportToken: false
secrets: |
kv/npm-registry/basicauth/ci token | NODE_AUTH_TOKEN
- name: Setup node with self-hosted npm registry
uses: actions/setup-node@v3
with:
node-version: "16"
registry-url: "https://npm.fluence.dev"
cache-dependency-path: "language-server/language-server-npm/package-lock.json"
cache: "npm"
- run: npm i
working-directory: language-server/language-server-npm
- name: Generate snapshot version
id: version
uses: fluencelabs/github-actions/generate-snapshot-id@main
- name: Publish snapshot
id: snapshot
uses: fluencelabs/github-actions/npm-publish-snapshot@main
with:
working-directory: language-server/language-server-npm
id: ${{ steps.version.outputs.id }}
set-version: false

View File

@ -7,19 +7,72 @@ on:
description: "git ref to checkout to"
type: string
default: "main"
fluence-env:
description: "Fluence enviroment to run tests agains"
type: string
default: "local"
nox-image:
description: "nox image tag"
type: string
default: "fluencelabs/nox:unstable_minimal"
js-client-snapshots:
description: "js-client snapshots"
type: string
default: "null"
env:
FORCE_COLOR: true
NOX_IMAGE: "${{ inputs.nox-image }}"
FLUENCE_ENV: "${{ inputs.fluence-env }}"
jobs:
tests:
name: "Run sbt tests"
aqua:
name: "Run tests"
runs-on: ubuntu-latest
timeout-minutes: 60
permissions:
contents: read
id-token: write
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Import secrets
uses: hashicorp/vault-action@v2.7.3
with:
url: https://vault.fluence.dev
path: jwt/github
role: ci
method: jwt
jwtGithubAudience: "https://github.com/fluencelabs"
jwtTtl: 300
secrets: |
kv/docker-registry/basicauth/ci username | DOCKER_USERNAME ;
kv/docker-registry/basicauth/ci password | DOCKER_PASSWORD ;
kv/npm-registry/basicauth/ci token | NODE_AUTH_TOKEN;
kv/slack/node-alerts webhook | INCOMING_WEBHOOK_URL
- name: Login to DockerHub
uses: docker/login-action@v2
with:
registry: docker.fluence.dev
username: ${{ env.DOCKER_USERNAME }}
password: ${{ env.DOCKER_PASSWORD }}
- name: Checkout
uses: actions/checkout@v4
with:
repository: fluencelabs/aqua
ref: ${{ inputs.ref }}
- name: Pull nox image
run: docker pull $NOX_IMAGE
- name: Run nox
uses: isbang/compose-action@v1.5.0
with:
compose-file: ".github/e2e/docker-compose.yml"
down-flags: "--volumes"
- name: Cache Scala
uses: coursier/cache-action@v6
@ -28,5 +81,40 @@ jobs:
with:
apps: sbt
- name: Run tests
run: sbt test
- name: aqua-api build
run: sbt "aqua-apiJS/fastBundleJS"
- name: Setup pnpm
uses: pnpm/action-setup@v2.4.0
with:
version: 8
- name: Setup node with self-hosted registry
uses: actions/setup-node@v3
with:
node-version: "18"
registry-url: "https://npm.fluence.dev"
cache: "pnpm"
- name: Override dependencies
if: inputs.js-client-snapshots != 'null'
uses: fluencelabs/github-actions/pnpm-set-dependency@main
with:
dependencies: |
{
"@fluencelabs/js-client": "${{ fromJson(inputs.js-client-snapshots)['js-client'] }}"
}
- run: pnpm install --no-frozen-lockfile
working-directory: integration-tests
- name: Compile aqua
run: pnpm run compile-aqua
working-directory: integration-tests
- run: pnpm run test
working-directory: integration-tests
- name: Dump nox logs
if: always()
uses: jwalton/gh-docker-logs@v2

7
.gitignore vendored
View File

@ -9,7 +9,12 @@ project/target
.DS_Store
npm/aqua.js
language-server/language-server-npm/aqua-lsp-api.js
api/api-npm/aqua-api.js
integration-tests/src/compiled/*
**/dist
**/node_modules
**/.antlr

View File

@ -1,5 +1,83 @@
# Changelog
## [0.12.1](https://github.com/fluencelabs/aqua/compare/aqua-v0.12.0...aqua-v0.12.1) (2023-09-11)
### Bug Fixes
* **compiler:** Error on not arrow call after `<-` ([#876](https://github.com/fluencelabs/aqua/issues/876)) ([69a808e](https://github.com/fluencelabs/aqua/commit/69a808e24307b5fe312a6dfdc6041c310c33d96d))
* **compiler:** Fix closure stream capture [fixes LNG-58] ([#857](https://github.com/fluencelabs/aqua/issues/857)) ([443e65e](https://github.com/fluencelabs/aqua/commit/443e65e3d8bca4774f5bdb6db5e526c5f2201c89))
* **deps:** update dependency @fluencelabs/aqua-lib to v0.7.3 ([#882](https://github.com/fluencelabs/aqua/issues/882)) ([3419607](https://github.com/fluencelabs/aqua/commit/3419607e8ccd3d280d5d168d6ffb9cb9380d32a8))
* **deps:** update dependency @fluencelabs/js-client to v0.1.1 ([#865](https://github.com/fluencelabs/aqua/issues/865)) ([1f23545](https://github.com/fluencelabs/aqua/commit/1f23545b49db2e3bb387ef9d961cac53bb75d127))
* **deps:** update dependency @fluencelabs/js-client to v0.1.3 ([#875](https://github.com/fluencelabs/aqua/issues/875)) ([df111ad](https://github.com/fluencelabs/aqua/commit/df111adf21c1abe5fbbed7264734927a3f048ffc))
## [0.12.0](https://github.com/fluencelabs/aqua/compare/aqua-v0.11.11...aqua-v0.12.0) (2023-08-25)
### ⚠ BREAKING CHANGES
* **js-client:** Move to new js-client interface ([#855](https://github.com/fluencelabs/aqua/issues/855))
### Features
* **compiler:** Restrict abilities usage [fixes LNG-208] ([#854](https://github.com/fluencelabs/aqua/issues/854)) ([2a0b207](https://github.com/fluencelabs/aqua/commit/2a0b20763396fea0ada5e14c01372dd3923b424b))
* **js-client:** Move to new js-client interface ([#855](https://github.com/fluencelabs/aqua/issues/855)) ([0f9ede0](https://github.com/fluencelabs/aqua/commit/0f9ede09fb849915b20f87fddb95ee2514421a19))
### Bug Fixes
* **compiler:** Fix nested abilities [fixes LNG-220] ([#852](https://github.com/fluencelabs/aqua/issues/852)) ([bf0b51f](https://github.com/fluencelabs/aqua/commit/bf0b51fa5bca3be96cab028eaec48aa5805b1f73))
## [0.11.11](https://github.com/fluencelabs/aqua/compare/aqua-v0.11.10...aqua-v0.11.11) (2023-08-21)
### Features
* **compiler:** Structural typing for data and abilities [fixes LNG-215] ([#843](https://github.com/fluencelabs/aqua/issues/843)) ([019611a](https://github.com/fluencelabs/aqua/commit/019611a89c31618985303d4984ed581eadad11f5))
* parseq implementation (fixes LNG-223) ([#840](https://github.com/fluencelabs/aqua/issues/840)) ([8060695](https://github.com/fluencelabs/aqua/commit/8060695dbb0a2f34febf739eb20db8b8781b3682))
### Bug Fixes
* **compiler:** Generate stream restriction for scoped exprs [fixes LNG-222] ([#841](https://github.com/fluencelabs/aqua/issues/841)) ([eb4cdb0](https://github.com/fluencelabs/aqua/commit/eb4cdb0dd12987e64881bab6ff19f935e905672e))
* **compiler:** Refactor values [fixes LNG-57] ([#821](https://github.com/fluencelabs/aqua/issues/821)) ([f562bd4](https://github.com/fluencelabs/aqua/commit/f562bd40b6df5bbfce5635c10710d91f21e3af88))
* Fix release build command ([#834](https://github.com/fluencelabs/aqua/issues/834)) ([6146f8e](https://github.com/fluencelabs/aqua/commit/6146f8e40a59c9fecd9f40b76e6ec6398b05ca05))
## [0.11.9](https://github.com/fluencelabs/aqua/compare/aqua-v0.11.8...aqua-v0.11.9) (2023-08-09)
### Features
* **compiler:** Add boolean algebra [fixes LNG-211] ([#814](https://github.com/fluencelabs/aqua/issues/814)) ([a5b6102](https://github.com/fluencelabs/aqua/commit/a5b610242260538ff38d62dc21b97a694d0776e3))
* **compiler:** Add equality ops [fixes LNG-217] ([#820](https://github.com/fluencelabs/aqua/issues/820)) ([a5e9354](https://github.com/fluencelabs/aqua/commit/a5e9354aebe9291e9fc0b1d29e74972bfaa254e2))
* **compiler:** Restrict exporting functions that return arrow types or ability types [fixes LNG-209] ([#815](https://github.com/fluencelabs/aqua/issues/815)) ([fabf8d7](https://github.com/fluencelabs/aqua/commit/fabf8d7d61ec8d70bf8e17e581c3c7371c4e6d78))
* wrap aqua api ([#807](https://github.com/fluencelabs/aqua/issues/807)) ([c7fca40](https://github.com/fluencelabs/aqua/commit/c7fca40f670a4b5a51ab4ce188f69f550d4bf6d6))
### Bug Fixes
* **compiler:** Fix `if` with brackets parsing ([#812](https://github.com/fluencelabs/aqua/issues/812)) ([4c3c32b](https://github.com/fluencelabs/aqua/commit/4c3c32b7c400e87f962dc9827892a9224765e2a4))
* **compiler:** Fix math ops for `u64` [fixes LNG-204] ([#811](https://github.com/fluencelabs/aqua/issues/811)) ([50ba194](https://github.com/fluencelabs/aqua/commit/50ba194b8610b60bcaefee401cadacb369246f79))
* **compiler:** Nested abilities [fixes LNG-214] ([#816](https://github.com/fluencelabs/aqua/issues/816)) ([4e3e70f](https://github.com/fluencelabs/aqua/commit/4e3e70f4fc855a16238c4f84bd4f6a1102890904))
* **compiler:** Runtime error on compilation exported functions with top types [fixes LNG-218] ([#822](https://github.com/fluencelabs/aqua/issues/822)) ([ef4b014](https://github.com/fluencelabs/aqua/commit/ef4b0143ac7cd4e1a5997d6a0f1f690ab806a315))
## [0.11.8](https://github.com/fluencelabs/aqua/compare/aqua-v0.11.7...aqua-v0.11.8) (2023-07-20)
### Features
* **compiler:** Abilities ([#731](https://github.com/fluencelabs/aqua/issues/731)) ([63a9f42](https://github.com/fluencelabs/aqua/commit/63a9f42e86d29b741fa31135b4111bc0f38f238f))
* **compiler:** Find and display link cycles ([#787](https://github.com/fluencelabs/aqua/issues/787)) ([667a825](https://github.com/fluencelabs/aqua/commit/667a8255d994b334dfc87bd89a970855748752fe))
* **compiler:** Make `on` propagate errors [fixes LNG-203] ([#788](https://github.com/fluencelabs/aqua/issues/788)) ([b8b0faf](https://github.com/fluencelabs/aqua/commit/b8b0fafda0d27607ffc693e52c0dae81d23ec503))
* **compiler:** Make topology hop with non-FFI snippet [fixes LNG-125] ([#764](https://github.com/fluencelabs/aqua/issues/764)) ([c1fe24b](https://github.com/fluencelabs/aqua/commit/c1fe24b04d8a2f711ed7b316e7ae9a4f12732421))
### Bug Fixes
* **ci:** use unstable nox image ([#780](https://github.com/fluencelabs/aqua/issues/780)) ([22f380a](https://github.com/fluencelabs/aqua/commit/22f380a49162d8d79cccad266b17116d9f9c7795))
* **compiler:** Fix search for one element cycles ([#797](https://github.com/fluencelabs/aqua/issues/797)) ([33ab33d](https://github.com/fluencelabs/aqua/commit/33ab33d4c8f34743202e5acbfb2e976ab3070299))
* **deps:** update dependency @fluencelabs/fluence-network-environment to v1.1.2 ([#786](https://github.com/fluencelabs/aqua/issues/786)) ([ca52e25](https://github.com/fluencelabs/aqua/commit/ca52e2542cc031c748c6f8c8372aff717e9c709f))
## [0.11.7](https://github.com/fluencelabs/aqua/compare/aqua-v0.11.6...aqua-v0.11.7) (2023-06-16)

View File

@ -1,40 +0,0 @@
# Installation of Aqua
The easiest way to use Aqua is to download the latest build from npm: [@fluencelabs/aqua](https://www.npmjs.com/package/@fluencelabs/aqua).
```bash
npm i -g @fluencelabs/aqua
aqua --input src/aqua --output src/generated
```
Input directory should contain files with `.aqua` scripts.
Other ways of building Aqua can be found in [INSTALL.md](./INSTALL.md).
## Build from sources
If you want to build Aqua compiler from the sourcecode, you need [Scala](https://www.scala-lang.org/)'s `sbt` installed.
Aqua compiler itself can be compiled to and distributed either as JavaScript or Java file.
### Build to JS
Run `sbt cliJS/fullLinkOpt` to build JavaScript file. You can find the compiled file in: `cli/.js/target/scala-%scala-version%/cli-opt`.
Then run it with `node`:
```bash
node aqua-%version_number%.js -i path/to/input/dir -o path/to/output/dir
```
Javascript build is the default for Aqua.
### Build to JVM
Run `sbt cli/assembly` to build JAR file. It is located in `cli/.jvm/target/scala-%scala-version%/`
It requires `java` to run Aqua compiler in `.jar` file from the command line:
```bash
java -jar aqua-%version_number%.jar -i path/to/input/dir -o path/to/output/dir
```

View File

@ -1,23 +1,16 @@
# Aqua
[![release](https://github.com/fluencelabs/aqua/actions/workflows/release.yml/badge.svg)](https://github.com/fluencelabs/aqua/actions/workflows/release.yml)
[![npm](https://img.shields.io/npm/v/@fluencelabs/aqua)](https://www.npmjs.com/package/@fluencelabs/aqua)
[![npm](https://img.shields.io/npm/v/@fluencelabs/aqua-api)](https://www.npmjs.com/package/@fluencelabs/aqua-api)
[Aqua](https://fluence.dev/docs/aqua-book/introduction) is an open-source language for distributed workflow coordination in p2p networks. Aqua programs are executed on many peers, sequentially or in parallel, forming a single-use coordination network. Applications are turned into hostless workflows over distributed function calls, which enables various levels of decentralization: from handling by a limited set of servers to complete peer-to-peer architecture by connecting user devices directly. Aqua is the core of the [Fluence](https://fluence.network/) protocol and a framework for internet or private cloud applications.
## Installation and Usage
## Usage
The easiest way to use Aqua is to download the latest build from npm: [@fluencelabs/aqua](https://www.npmjs.com/package/@fluencelabs/aqua).
The easiest way to use Aqua is through [Fluence CLI](https://github.com/fluencelabs/cli) with [aqua command](https://github.com/fluencelabs/cli/blob/main/docs/commands/README.md#fluence-aqua).
```bash
npm i -g @fluencelabs/aqua
aqua --input src/aqua --output src/generated
```
Input directory should contain files with `.aqua` scripts.
Other ways of installing Aqua can be found in [INSTALL.md](./INSTALL.md).
Other ways of using Aqua are described in [USAGE.md](./USAGE.md).
## Documentation
@ -28,16 +21,13 @@ Comprehensive documentation and usage examples as well as a number of videos can
## Repository Structure
- [**api**](./api) - Aqua API for JS
- [aqua-api](./api/aqua-api) - Scala sources
- [aqua-api-npm](./api/aqua-api-npm) - JS project
- [aqua-api-example](./api/aqua-api-example) - usage example
- [api](./api/api) - Scala sources
- [api-npm](./api/api-npm) - JS project
- [api-example](./api/api-example) - usage example
- [**aqua-run**](./aqua-run) - Aqua API to run functions
- [**backend**](./backend) - compilation backend interface
- [backend/air](./backend/air) - generates AIR code from the middle-end model
- [backend/ts](./backend/ts) - generates AIR code and TypeScript wrappers for use with [Fluence JS]( https://github.com/fluencelabs/fluence-js) SDK
- [**cli**](./cli) - CLI interface
- [cli](./cli/cli) - Scala sources
- [cli-npm](./cli/cli-npm) - JS project
- [**compiler**](./compiler) - compiler as a pure function made from _linker_, _semantics_ and _backend_
- [**model**](./model) - middle-end, internal representation of the code, optimizations and transformations
- [transform](./model/transform) - optimizations and transformations, converting model to the result, ready to be rendered

28
USAGE.md Normal file
View File

@ -0,0 +1,28 @@
# Usage of Aqua
## Fluence CLI
The easiest way to use Aqua is through [Fluence CLI](https://github.com/fluencelabs/cli) with [aqua command](https://github.com/fluencelabs/cli/blob/main/docs/commands/README.md#fluence-aqua).
## JS API
You can use Aqua compiler API directly from JS code by installing [`@fluencelabs/aqua-api` package](https://www.npmjs.com/package/@fluencelabs/aqua-api). See an example usage in [api/api-example](./api/api-example).
## Build from sources
If you want to build Aqua compiler API from the source code, you need [Scala](https://www.scala-lang.org/)'s [`sbt`](https://www.scala-sbt.org/) installed.
### Build to JS package
Javascript build is the default for Aqua compiler API.
Run `sbt "aqua-apiJS/fullBundleJS"`. It will generate JS package in `api/api-npm` directory.
### Build to JVM library
Building Aqua compiler API as JVM lib is technically possible, but is not supported.

20
api/api-example/index.js Normal file
View File

@ -0,0 +1,20 @@
//@ts-check
import { compileAquaCallFromPath } from '@fluencelabs/aqua-api'
// compile call
const compilationResult = await compileAquaCallFromPath({
filePath: 'test.aqua',
data: { num: 3 },
funcCall: 'getNumber(num)',
})
const {
errors,
functionCall: { funcDef, script },
functions,
generatedSources,
services,
} = compilationResult
console.log(script)

View File

@ -10,6 +10,6 @@
"type": "module",
"license": "Apache-2.0",
"dependencies": {
"@fluencelabs/aqua-api": "0.10.4"
"@fluencelabs/aqua-api": "workspace:*"
}
}

3
api/api-npm/README.md Normal file
View File

@ -0,0 +1,3 @@
# Aqua API
[Documentation](https://fluence.dev/docs/aqua-book/aqua-js-api)

90
api/api-npm/index.d.ts vendored Normal file
View File

@ -0,0 +1,90 @@
import { ServiceDef, FunctionCallDef } from "@fluencelabs/interfaces";
export class AquaFunction {
funcDef: FunctionCallDef;
script: string;
}
export class GeneratedSource {
name: string;
tsSource?: string;
jsSource?: string;
tsTypes?: string;
}
class CompilationResult {
services: Record<string, ServiceDef>;
functions: Record<string, AquaFunction>;
functionCall?: AquaFunction;
errors: string[];
generatedSources: GeneratedSource[];
}
/** Common arguments for all compile functions */
type CommonArgs = {
/** Paths to directories, which you want to import .aqua files from. Example: ["./path/to/dir"] */
imports?: string[] | undefined;
/** Constants to be passed to the compiler. Example: ["CONSTANT1=1", "CONSTANT2=2"] */
constants?: string[] | undefined;
/** Set log level for the compiler. Must be one of: Must be one of: all, trace, debug, info, warn, error, off. Default: info */
logLevel?: string | undefined;
/** Do not generate a pass through the relay node. Default: false */
noRelay?: boolean | undefined;
/** Do not generate a wrapper that catches and displays errors. Default: false */
noXor?: boolean | undefined;
/** Target type for the compiler. Must be one of: ts, js, air. Default: air */
targetType?: "ts" | "js" | "air" | undefined;
/** Compile aqua in tracing mode (for debugging purposes). Default: false */
tracing?: boolean | undefined;
};
type CodeString = {
/** Aqua code to be compiled */
code: string;
};
export type CompileFromStringArgs = CommonArgs & CodeString;
export type CompileFromStringReturnType = Omit<CompilationResult, "funcCall">;
/** Compile aqua code from a string */
export declare function compileFromString(
args: CompileFromStringArgs,
): Promise<CompileFromStringReturnType>;
type FilePath = {
/** Path to the aqua file to be compiled */
filePath: string;
};
export type CompileFromPathArgs = CommonArgs & FilePath;
export type CompileFromPathReturnType = Omit<CompilationResult, "funcCall">;
/** Compile aqua code from a file */
export declare function compileFromPath(
args: CompileFromPathArgs,
): Promise<CompileFromPathReturnType>;
type FuncCall = {
/** Function call you want to compile. Example: someFunc("someArg") */
funcCall: string;
/** Args to be passed to the function (record with keys named as args you want to pass to the function) Example: { someArg: 1 } */
data?: Record<string, unknown> | undefined;
};
export type CompileFuncCallFromStringArgs = CommonArgs & CodeString & FuncCall;
export type CompileFuncCallFromStringReturnType = Required<CompilationResult>;
/** Compile aqua function call from a string */
export declare function compileAquaCallFromString(
args: CompileFuncCallFromStringArgs,
): Promise<CompileFuncCallFromStringReturnType>;
export type CompileFuncCallFromPathArgs = CommonArgs & FilePath & FuncCall;
export type CompileFuncCallFromPathReturnType = Required<CompilationResult>;
/** Compile aqua function call from a file */
export declare function compileAquaCallFromPath(
args: CompileFuncCallFromPathArgs,
): Promise<CompileFuncCallFromPathReturnType>;
export {};

65
api/api-npm/index.js Normal file
View File

@ -0,0 +1,65 @@
import { AquaConfig, Aqua, Call, Input, Path } from "./aqua-api.js";
function getConfig({
constants = [],
logLevel = "info",
noRelay = false,
noXor = false,
targetType = "air",
tracing = false,
}) {
return new AquaConfig(
logLevel,
constants,
noXor,
noRelay,
{
ts: "typescript",
js: "javascript",
air: "air",
}[targetType],
tracing,
);
}
export function compileFromString({ code, ...commonArgs }) {
const config = getConfig(commonArgs);
const { imports = [] } = commonArgs;
return Aqua.compile(new Input(code), imports, config);
}
export function compileFromPath({ filePath, ...commonArgs }) {
const config = getConfig(commonArgs);
const { imports = [] } = commonArgs;
return Aqua.compile(new Path(filePath), imports, config);
}
export function compileAquaCallFromString({
code,
funcCall,
data,
...commonArgs
}) {
const config = getConfig(commonArgs);
const { imports = [] } = commonArgs;
return Aqua.compile(
new Call(funcCall, data, new Input(code)),
imports,
config,
);
}
export function compileAquaCallFromPath({
filePath,
funcCall,
data,
...commonArgs
}) {
const config = getConfig(commonArgs);
const { imports = [] } = commonArgs;
return Aqua.compile(
new Call(funcCall, data, new Input(filePath)),
imports,
config,
);
}

View File

@ -1,17 +1,16 @@
{
"name": "@fluencelabs/aqua-api",
"version": "0.11.7",
"version": "0.12.1",
"description": "Aqua API",
"type": "commonjs",
"type": "module",
"main": "index.js",
"files": [
"index.js",
"index.d.ts",
"aqua-api.js",
"aqua-api.d.ts",
"meta-utils.js"
],
"scripts": {
"move:scalajs": "cp ../aqua-api/.js/target/scala-3.3.0/aqua-api-opt/main.js ./aqua-api.js",
"move:fast": "cp ../aqua-api/.js/target/scala-3.3.0/aqua-api-fastopt/main.js ./aqua-api.js"
},
"prettier": {},
"repository": {
"type": "git",
"url": "git+https://github.com/fluencelabs/aqua.git"
@ -27,6 +26,7 @@
},
"homepage": "https://github.com/fluencelabs/aqua#readme",
"devDependencies": {
"@fluencelabs/fluence": "0.28.0"
"@fluencelabs/interfaces": "0.8.2",
"prettier": "3.0.0"
}
}

View File

@ -0,0 +1,40 @@
package aqua.api
import aqua.api.TargetType.TypeScriptType
import aqua.backend.ts.TypeScriptBackend
import aqua.compiler.AquaCompiled
import aqua.files.FileModuleId
import cats.data.Chain
import cats.data.Validated.{Invalid, Valid}
import cats.effect.{IO, IOApp}
import fs2.io.file.{Files, Path}
import fs2.{Stream, text}
object Test extends IOApp.Simple {
override def run: IO[Unit] = {
APICompilation
.compilePath(
"./aqua-src/antithesis.aqua",
"./aqua" :: Nil,
AquaAPIConfig(targetType = TypeScriptType),
TypeScriptBackend(false, "IFluenceClient$$")
)
.flatMap {
case Valid(res) =>
val content = res.get(0).get.compiled.head.content
val targetPath = Path("./target/antithesis.ts")
Stream.emit(content)
.through(text.utf8.encode)
.through(Files[IO].writeAll(targetPath))
.attempt
.compile
.last.flatMap(_ => IO.delay(println(s"File: ${targetPath.absolute.normalize}")))
case Invalid(e) =>
IO.delay(println(e))
}
}
}

View File

@ -8,14 +8,13 @@ case class AquaAPIConfig(
targetType: TargetType = TargetType.AirType,
logLevel: String = "info",
constants: List[String] = Nil,
noXor: Boolean = false,
noXor: Boolean = false, // TODO: Remove
noRelay: Boolean = false,
tracing: Boolean = false
) {
def getTransformConfig: TransformConfig = {
val config = TransformConfig(
wrapWithXor = !noXor,
tracing = Option.when(tracing)(TransformConfig.TracingConfig.default)
)

View File

@ -1,32 +0,0 @@
import {
Aqua,
Call,
Path,
} from "@fluencelabs/aqua-api/aqua-api.js";
const aquaPath = new Path("test.aqua")
// write function that we want to call and arguments
const args = {num: 42}
const call = new Call("getNumber(num)", args, aquaPath)
// compile call
const compilationResult = await Aqua.compile(call, [])
/*
// Compilation result definition
export class CompilationResult {
// List of service definitions to register in Fluence JS Client
services: Record<string, ServiceDef>
// List of function definitions to call in Fluence JS Client
functions: Record<string, AquaFunction>
// Definition of wrapped function to call in Fluence JS Client
functionCall?: AquaFunction
// List of errors. All other fields will be empty if `errors` not empty
errors: string[]
}
*/
// get function definition, that describes types of arguments and results of a function
// and AIR script
const {funcDef, script} = compilationResult.functionCall

View File

@ -1,21 +0,0 @@
{
"name": "aqua-api-example",
"version": "1.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "aqua-api-example",
"version": "1.0.0",
"license": "Apache-2.0",
"dependencies": {
"@fluencelabs/aqua-api": "0.10.4"
}
},
"node_modules/@fluencelabs/aqua-api": {
"version": "0.10.4",
"resolved": "https://registry.npmjs.org/@fluencelabs/aqua-api/-/aqua-api-0.10.4.tgz",
"integrity": "sha512-mBT/ht0mVcGqBfkrnQw9E/tqIW3RNOCDhNwjra9X5WI/TRlztU3G8Vn/odVn6YTpZWJeDwn1qN1VgLoS3VkASA=="
}
}
}

View File

@ -1,67 +0,0 @@
import type { FunctionCallDef, ServiceDef } from "@fluencelabs/fluence/dist/internal/compilerSupport/v3impl/interface"
export class AquaConfig {
constructor(
logLevel?: string,
constants?: string[],
noXor?: boolean,
noRelay?: boolean,
targetType?: string,
tracing?: boolean
);
logLevel?: string
constants?: string[]
noXor?: boolean
noRelay?: boolean
targetType?: string
tracing?: boolean
}
export class AquaFunction {
funcDef: FunctionCallDef
script: string
}
export class GeneratedSource {
name: string
tsSource?: string
jsSource?: string
tsTypes?: string
}
export class CompilationResult {
services: Record<string, ServiceDef>
functions: Record<string, AquaFunction>
functionCall?: AquaFunction
errors: string[]
generatedSources: GeneratedSource[]
}
export class Input {
constructor(input: string);
input: string
}
export class Path {
constructor(path: string);
path: string
}
export class Call {
constructor(functionCall: string,
arguments: any,
input: Input | Path);
functionCall: string
arguments: any
input: Input | Path
}
export class Compiler {
compile(input: Input | Path | Call, imports: string[], config?: AquaConfig): Promise<CompilationResult>;
}
export var Aqua: Compiler;

File diff suppressed because it is too large Load Diff

View File

@ -1,3 +0,0 @@
# Aqua API
[Documentation](https://fluence.dev/docs/aqua-book/aqua-js-api)

View File

@ -1,18 +0,0 @@
package aqua.api
import cats.effect.{IO, IOApp}
import aqua.backend.js.JavaScriptBackend
import aqua.backend.ts.TypeScriptBackend
import aqua.api.TargetType.JavaScriptType
object Test extends IOApp.Simple {
override def run: IO[Unit] = {
val input =
"""func getNumber(number: u32) -> u32:
| <- number
|""".stripMargin
APICompilation.compileString(input, Nil, AquaAPIConfig(targetType = JavaScriptType), JavaScriptBackend()).map {
res =>
println(res)
}
}
}

View File

@ -4,68 +4,61 @@ import aqua.parser.lexer.{CallArrowToken, CollectionToken, LiteralToken, VarToke
import aqua.parser.lift.Span
import aqua.raw.value.{CollectionRaw, LiteralRaw, ValueRaw, VarRaw}
import aqua.types.{ArrayType, BottomType}
import cats.data.{NonEmptyList, Validated, ValidatedNel}
import cats.data.Validated.{invalid, invalidNel, validNel}
import cats.{Id, ~>}
import cats.{~>, Id}
import cats.syntax.traverse.*
import cats.syntax.validated.*
import cats.syntax.either.*
import cats.syntax.comonad.*
import cats.syntax.option.*
case class CliFunc(name: String, args: List[ValueRaw] = Nil, ability: Option[String] = None)
case class CliFunc(name: String, args: List[ValueRaw] = Nil)
object CliFunc {
def spanToId: Span.S ~> Id = new (Span.S ~> Id) {
override def apply[A](span: Span.S[A]): Id[A] = {
span._2
}
override def apply[A](span: Span.S[A]): Id[A] = span.extract
}
def fromString(func: String): ValidatedNel[String, CliFunc] = {
CallArrowToken.callArrow.parseAll(func.trim) match {
case Right(exprSpan) =>
val expr = exprSpan.mapK(spanToId)
val argsV = expr.args.collect {
CallArrowToken.callArrow
.parseAll(func.trim)
.toValidated
.leftMap(
_.expected.map(_.context.mkString("\n"))
)
.map(_.mapK(spanToId))
.andThen(expr =>
expr.args.traverse {
case LiteralToken(value, ts) =>
validNel(LiteralRaw(value, ts))
case VarToken(name, _) =>
validNel(VarRaw(name.value, BottomType))
LiteralRaw(value, ts).valid
case VarToken(name) =>
VarRaw(name.value, BottomType).valid
case CollectionToken(_, values) =>
val hasVariables = values.exists {
case LiteralToken(_, _) => false
case _ => true
}
if (!hasVariables) {
val literals = values.collect { case LiteralToken(value, ts) =>
LiteralRaw(value, ts)
}
val hasSameTypesOrEmpty =
literals.isEmpty || literals.map(_.baseType).toSet.size == 1
if (hasSameTypesOrEmpty) {
validNel(
NonEmptyList
.fromList(literals)
.map(l => CollectionRaw(l, ArrayType(l.head.baseType)))
.getOrElse(ValueRaw.Nil)
)
} else
invalidNel(
"If the argument is an array, then it must contain elements of the same type."
)
} else
invalidNel(
"Array arguments can only have numbers, strings, or booleans."
values.traverse {
case LiteralToken(value, ts) =>
LiteralRaw(value, ts).some
case _ => none
}.toValid(
"Array elements can only be numbers, strings, or booleans."
).ensure(
"If the argument is an array, then it must contain elements of the same type."
)(_.distinctBy(_.`type`).size <= 1)
.map(
NonEmptyList
.fromList(_)
.map(l => CollectionRaw(l, ArrayType(l.head.baseType)))
.getOrElse(ValueRaw.Nil)
)
.toValidatedNel
case CallArrowToken(_, _, _) =>
invalidNel("Function calls as arguments are not supported.")
}.sequence
argsV.andThen(args =>
validNel(CliFunc(expr.funcName.value, args, expr.ability.map(_.name)))
)
case Left(err) => invalid(err.expected.map(_.context.mkString("\n")))
}
"Function calls as arguments are not supported.".invalidNel
case _ =>
"Unsupported argument.".invalidNel
}.map(args => CliFunc(expr.funcName.value, args))
)
}
}

View File

@ -9,6 +9,7 @@ import aqua.model.transform.TransformConfig
import aqua.model.{AquaContext, FuncArrow}
import aqua.parser.lift.FileSpan
import aqua.run.CliFunc
import cats.data.Validated.{invalidNec, validNec}
import cats.data.{Chain, NonEmptyList, Validated, ValidatedNec}
import cats.effect.IO
@ -19,6 +20,7 @@ import cats.syntax.functor.*
import cats.syntax.monad.*
import cats.syntax.show.*
import cats.syntax.traverse.*
import cats.syntax.option.*
import fs2.io.file.{Files, Path}
import scribe.Logging
@ -84,16 +86,9 @@ object FuncCompiler {
def findFunction(
contexts: Chain[AquaContext],
func: CliFunc
): ValidatedNec[String, FuncArrow] =
func.ability
.fold(
contexts
.collectFirstSome(_.allFuncs.get(func.name))
)(ab => contexts.collectFirstSome(_.abilities.get(ab).flatMap(_.allFuncs.get(func.name))))
.map(validNec)
.getOrElse(
Validated.invalidNec[String, FuncArrow](
s"There is no function '${func.ability.map(_ + ".").getOrElse("")}${func.name}' or it is not exported. Check the spelling or see https://fluence.dev/docs/aqua-book/language/header/#export"
)
)
): ValidatedNec[String, FuncArrow] = contexts
.collectFirstSome(_.allFuncs.get(func.name))
.toValidNec(
s"There is no function '${func.name}' or it is not exported. Check the spelling or see https://fluence.dev/docs/aqua-book/language/header/#export"
)
}

View File

@ -1,9 +1,12 @@
module Import3 declares *
service Console("run-console"):
print(s: string)
export foo_bar
use "export.aqua"
func foo_bar() -> string, string:
z <- FooBars.foo()
<- z, FooBars.DECLARE_CONST2
func main():
ss: *string
dd: *string
peerId = "peerId"
relay = "relay"
parsec s <- ss on peerId via relay:
Console.print(s)
for d <- dd par:
Console.print(d)

View File

@ -24,6 +24,8 @@ object Keyword {
case object Ap extends Keyword("ap")
case object Fail extends Keyword("fail")
case object Canon extends Keyword("canon")
case object Seq extends Keyword("seq")
@ -46,8 +48,6 @@ object DataView {
case class Variable(name: String) extends DataView
case class Stream(name: String) extends DataView
case class VarLens(name: String, lens: String, isField: Boolean = true) extends DataView {
def append(sublens: String): VarLens = copy(lens = lens + sublens)
}
@ -57,7 +57,6 @@ object DataView {
case InitPeerId "%init_peer_id%"
case LastError "%last_error%"
case Variable(name) name
case Stream(name) name
case VarLens(name, lens, isField)
if (isField) name + ".$" + lens
else name + lens
@ -90,7 +89,12 @@ object Air {
case class Next(label: String) extends Air(Keyword.Next)
case class Fold(iterable: DataView, label: String, instruction: Air, lastNextInstruction: Option[Air]) extends Air(Keyword.Fold)
case class Fold(
iterable: DataView,
label: String,
instruction: Air,
lastNextInstruction: Option[Air]
) extends Air(Keyword.Fold)
case class Match(left: DataView, right: DataView, instruction: Air) extends Air(Keyword.Match)
@ -108,6 +112,8 @@ object Air {
case class Ap(op: DataView, result: String) extends Air(Keyword.Ap)
case class Fail(op: DataView) extends Air(Keyword.Fail)
case class Canon(op: DataView, peerId: DataView, result: String) extends Air(Keyword.Canon)
case class Comment(comment: String, air: Air) extends Air(Keyword.NA)
@ -141,6 +147,7 @@ object Air {
case Air.Call(triplet, args, res)
s" ${triplet.show} [${args.map(_.show).mkString(" ")}]${res.fold("")(" " + _)}"
case Air.Ap(operand, result) s" ${operand.show} $result"
case Air.Fail(operand) => s" ${operand.show}"
case Air.Canon(operand, peerId, result) s" ${peerId.show} ${operand.show} $result"
case Air.Comment(_, _) => ";; Should not be displayed"
}) + ")\n"

View File

@ -3,7 +3,7 @@ package aqua.backend.air
import aqua.model.*
import aqua.raw.ops.Call
import aqua.res.*
import aqua.types.{ArrayType, CanonStreamType, StreamType}
import aqua.types.{ArrayType, CanonStreamType, StreamType, Type}
import cats.Eval
import cats.data.Chain
import cats.free.Cofree
@ -26,14 +26,17 @@ object AirGen extends Logging {
s".[$idx]${propertyToString(tail)}"
}
def varNameToString(name: String, `type`: Type): String =
(`type` match {
case _: StreamType => "$" + name
case _: CanonStreamType => "#" + name
case _ => name
}).replace('.', '_')
def valueToData(vm: ValueModel): DataView = vm match {
case LiteralModel(value, _) => DataView.StringScalar(value)
case VarModel(name, t, property) =>
val n = (t match {
case _: StreamType => "$" + name
case _: CanonStreamType => "#" + name
case _ => name
}).replace('.', '_')
val n = varNameToString(name, t)
if (property.isEmpty) DataView.Variable(n)
else {
val functors = property.find {
@ -93,12 +96,12 @@ object AirGen extends Logging {
case FoldRes(item, iterable, mode) =>
val m = mode.map {
case ForModel.NullMode => NullGen
case ForModel.NeverMode => NeverGen
case ForModel.Mode.Null => NullGen
case ForModel.Mode.Never => NeverGen
}
Eval later ForGen(valueToData(iterable), item, opsToSingle(ops), m)
case RestrictionRes(item, isStream) =>
Eval later NewGen(item, isStream, opsToSingle(ops))
case RestrictionRes(item, itemType) =>
Eval later NewGen(varNameToString(item, itemType), opsToSingle(ops))
case CallServiceRes(serviceId, funcName, CallRes(args, exportTo), peerId) =>
Eval.later(
ServiceCallGen(
@ -115,6 +118,11 @@ object AirGen extends Logging {
ApGen(valueToData(operand), exportToString(exportTo))
)
case FailRes(operand) =>
Eval.later(
FailGen(valueToData(operand))
)
case CanonRes(operand, peerId, exportTo) =>
Eval.later(
CanonGen(valueToData(operand), valueToData(peerId), exportToString(exportTo))
@ -161,6 +169,12 @@ case class ApGen(operand: DataView, result: String) extends AirGen {
Air.Ap(operand, result)
}
case class FailGen(operand: DataView) extends AirGen {
override def generate: Air =
Air.Fail(operand)
}
case class CanonGen(operand: DataView, peerId: DataView, result: String) extends AirGen {
override def generate: Air =
@ -179,14 +193,17 @@ case class MatchMismatchGen(
else Air.Mismatch(left, right, body.generate)
}
case class ForGen(iterable: DataView, item: String, body: AirGen, mode: Option[AirGen]) extends AirGen {
case class ForGen(iterable: DataView, item: String, body: AirGen, mode: Option[AirGen])
extends AirGen {
override def generate: Air = Air.Fold(iterable, item, body.generate, mode.map(_.generate))
}
case class NewGen(item: String, isStream: Boolean, body: AirGen) extends AirGen {
case class NewGen(name: String, body: AirGen) extends AirGen {
override def generate: Air =
Air.New(if (isStream) DataView.Stream("$" + item) else DataView.Variable(item), body.generate)
override def generate: Air = Air.New(
DataView.Variable(name),
body.generate
)
}
case class NextGen(item: String) extends AirGen {

View File

@ -106,6 +106,9 @@ object TypeDefinition {
case t: BoxType => ArrayTypeDef(TypeDefinition(t.element))
case StructType(name, fields) =>
StructTypeDef(name, fields.toSortedMap.view.mapValues(TypeDefinition.apply).toMap)
case AbilityType(name, fieldAndArrows) =>
// TODO: change in union with JS side
StructTypeDef(name, fieldAndArrows.toSortedMap.view.mapValues(TypeDefinition.apply).toMap)
case t: ScalarType => ScalarTypeDef.fromScalar(t)
case t: LiteralType => ScalarTypeDef.fromLiteral(t)
case t: ProductType => ProductTypeDef(t)

View File

@ -21,13 +21,13 @@ object Header {
val callParams =
if (isJs) ""
else
"import type { IFluenceClient as IFluenceClient$$, CallParams as CallParams$$ } from '@fluencelabs/js-client.api';"
"import type { IFluenceClient as IFluenceClient$$, CallParams as CallParams$$ } from '@fluencelabs/js-client';"
s"""$callParams
|import {
| v5_callFunction as callFunction$$$$,
| v5_registerService as registerService$$$$,
|} from '@fluencelabs/js-client.api';
|} from '@fluencelabs/js-client';
""".stripMargin
}

View File

@ -1,6 +1,5 @@
package aqua.backend.ts
import aqua.backend.air.FuncAirGen
import aqua.res.FuncRes
import aqua.types.*
import cats.syntax.show.*
@ -36,14 +35,28 @@ object TypeScriptCommon {
"[" + pt.toList.map(typeToTs).mkString(", ") + "]"
case st: StructType =>
s"{ ${st.fields.map(typeToTs).toNel.map(kv => kv._1 + ": " + kv._2 + ";").toList.mkString(" ")} }"
case st: ScalarType if ScalarType.number(st) => "number"
case ScalarType.bool => "boolean"
case ScalarType.string => "string"
case lt: LiteralType if lt.oneOf.exists(ScalarType.number) => "number"
case lt: LiteralType if lt.oneOf(ScalarType.bool) => "boolean"
case lt: LiteralType if lt.oneOf(ScalarType.string) => "string"
case _: DataType => "any"
case st: AbilityType =>
s"{ ${st.fields.map(typeToTs).toNel.map(kv => kv._1 + ": " + kv._2 + ";").toList.mkString(" ")} }"
case st: ScalarType => st match {
case st: ScalarType if ScalarType.number(st) => "number"
case ScalarType.bool => "boolean"
case ScalarType.string => "string"
// unreachable
case _ => "any"
}
case lt: LiteralType => lt match {
case lt: LiteralType if lt.oneOf.exists(ScalarType.number) => "number"
case lt: LiteralType if lt.oneOf(ScalarType.bool) => "boolean"
case lt: LiteralType if lt.oneOf(ScalarType.string) => "string"
// unreachable
case _ => "any"
}
case at: ArrowType => fnDef(at)
case TopType => "any"
case BottomType => "nothing"
// impossible. Made to avoid compilation warning
case t: CanonStreamType => "any"
}
// TODO: handle cases if there is already peer_ or config_ variable defined

135
build.sbt
View File

@ -1,11 +1,13 @@
val aquaVersion = "0.11.7"
import BundleJS.*
val scalaV = "3.3.0"
val catsV = "2.8.0"
val catsParseV = "0.3.9"
val aquaVersion = "0.12.1"
val scalaV = "3.3.1"
val catsV = "2.10.0"
val catsParseV = "0.3.10"
val monocleV = "3.1.0"
val scalaTestV = "3.2.16"
val fs2V = "3.7.0"
val scalaTestV = "3.2.17"
val fs2V = "3.9.2"
val catsEffectV = "3.6-1f95fd7"
val declineV = "2.3.0"
val circeVersion = "0.14.2"
@ -38,65 +40,17 @@ val commons = Seq(
commons
lazy val cli = crossProject(JSPlatform, JVMPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("cli/cli"))
.enablePlugins(GraalVMNativeImagePlugin)
.settings(commons: _*)
.settings(
Compile / mainClass := Some("aqua.AquaCli"),
graalVMNativeImageOptions ++= Seq(
"--no-fallback",
"--diagnostics-mode",
"--initialize-at-build-time",
"--initialize-at-run-time=scala.util.Random$",
"-H:-DeleteLocalSymbols",
"-H:+PreserveFramePointer",
"-H:+ReportExceptionStackTraces",
"-H:+DashboardHeap",
"-H:+DashboardCode",
"-H:+DashboardPointsTo",
"-H:+DashboardAll"
) ++ sys.env
.get("COMPILE_STATIC")
.filter(_.trim.toLowerCase() == "true")
.map(_ => Seq("--static"))
.getOrElse(Seq.empty),
libraryDependencies ++= Seq(
"com.monovore" %%% "decline" % declineV,
"com.monovore" %%% "decline-effect" % declineV
)
)
.dependsOn(compiler, `backend-air`, `backend-ts`, io, definitions, logging, constants, `aqua-run`)
lazy val cliJS = cli.js
.settings(
scalaJSLinkerConfig ~= (_.withModuleKind(ModuleKind.ESModule)),
scalaJSUseMainModuleInitializer := true
)
.dependsOn(`js-exports`, `js-imports`)
lazy val cliJVM = cli.jvm
.settings(
Compile / run / mainClass := Some("aqua.AquaCli"),
assembly / mainClass := Some("aqua.AquaCli"),
assembly / assemblyJarName := "aqua-" + version.value + ".jar",
libraryDependencies ++= Seq(
)
)
lazy val `aqua-run` = crossProject(JSPlatform, JVMPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("aqua-run"))
.settings(commons: _*)
.settings(commons)
.dependsOn(compiler, `backend-air`, `backend-ts`, io, definitions, logging, constants)
lazy val io = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-effect" % catsEffectV,
@ -111,7 +65,7 @@ lazy val `language-server-api` = crossProject(JSPlatform, JVMPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("language-server/language-server-api"))
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-effect" % catsEffectV,
@ -125,34 +79,36 @@ lazy val `language-server-apiJS` = `language-server-api`.js
scalaJSLinkerConfig ~= (_.withModuleKind(ModuleKind.CommonJSModule)),
scalaJSUseMainModuleInitializer := true
)
.settings(addBundleJS("../../language-server-npm/aqua-lsp-api.js"))
.enablePlugins(ScalaJSPlugin)
.dependsOn(`js-exports`, `js-imports`)
lazy val `js-exports` = project
.in(file("js/js-exports"))
.enablePlugins(ScalaJSPlugin)
.settings(commons: _*)
.settings(commons)
.dependsOn(`backend`.js, definitions.js)
lazy val `js-imports` = project
.in(file("js/js-imports"))
.enablePlugins(ScalaJSPlugin)
.settings(commons: _*)
.settings(commons)
.dependsOn(`js-exports`, transform.js)
lazy val `aqua-api` = crossProject(JSPlatform, JVMPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("api/aqua-api"))
.settings(commons: _*)
.in(file("api/api"))
.settings(commons)
.dependsOn(`aqua-run`, `backend-api`)
lazy val `aqua-apiJS` = `aqua-api`.js
.settings(
scalaJSLinkerConfig ~= (_.withModuleKind(ModuleKind.CommonJSModule)),
scalaJSLinkerConfig ~= (_.withModuleKind(ModuleKind.ESModule)),
scalaJSUseMainModuleInitializer := true,
Test / test := {}
)
.settings(addBundleJS("../../api-npm/aqua-api.js"))
.enablePlugins(ScalaJSPlugin)
.dependsOn(`js-exports`)
@ -169,70 +125,71 @@ lazy val types = crossProject(JVMPlatform, JSPlatform)
lazy val parser = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-parse" % catsParseV,
"org.typelevel" %%% "cats-free" % catsV
)
)
.dependsOn(types)
.dependsOn(types, helpers)
lazy val linker = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.settings(commons: _*)
.settings(commons)
.dependsOn(parser)
lazy val tree = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("model/tree"))
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-free" % catsV
)
)
.dependsOn(helpers)
lazy val raw = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("model/raw"))
.settings(commons: _*)
.settings(commons)
.dependsOn(types, tree)
lazy val model = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.settings(commons: _*)
.dependsOn(types, tree, raw)
.settings(commons)
.dependsOn(types, tree, raw, helpers)
lazy val res = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("model/res"))
.settings(commons: _*)
.settings(commons)
.dependsOn(model)
lazy val inline = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("model/inline"))
.settings(commons: _*)
.settings(commons)
.dependsOn(raw, model)
lazy val transform = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("model/transform"))
.settings(commons: _*)
.dependsOn(model, res, inline)
.settings(commons)
.dependsOn(model, res, inline, res % "test->test")
lazy val semantics = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"dev.optics" %%% "monocle-core" % monocleV,
@ -245,14 +202,14 @@ lazy val compiler = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("compiler"))
.settings(commons: _*)
.dependsOn(semantics, linker, backend, transform % Test)
.settings(commons)
.dependsOn(semantics, linker, backend, transform % "test->test", res % "test->test")
lazy val backend = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("backend"))
.settings(commons: _*)
.settings(commons)
.enablePlugins(BuildInfoPlugin)
.settings(
buildInfoKeys := Seq[BuildInfoKey](version),
@ -264,7 +221,7 @@ lazy val definitions = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("backend/definitions"))
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"io.circe" %%% "circe-core",
@ -278,7 +235,7 @@ lazy val logging = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("utils/logging"))
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-core" % catsV
@ -289,7 +246,7 @@ lazy val constants = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("utils/constants"))
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-core" % catsV
@ -297,25 +254,37 @@ lazy val constants = crossProject(JVMPlatform, JSPlatform)
)
.dependsOn(parser, raw)
lazy val helpers = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("utils/helpers"))
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-core" % catsV,
"org.typelevel" %%% "cats-free" % catsV
)
)
lazy val `backend-air` = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("backend/air"))
.settings(commons: _*)
.settings(commons)
.dependsOn(backend, transform)
lazy val `backend-api` = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("backend/api"))
.settings(commons: _*)
.settings(commons)
.dependsOn(backend, transform, `backend-air`)
lazy val `backend-ts` = crossProject(JVMPlatform, JSPlatform)
.withoutSuffixFor(JVMPlatform)
.crossType(CrossType.Pure)
.in(file("backend/ts"))
.settings(commons: _*)
.settings(commons)
.settings(
libraryDependencies ++= Seq(
"io.circe" %%% "circe-core",

171
ci.cjs Normal file
View File

@ -0,0 +1,171 @@
#! /usr/bin/env node
const fs = require("fs").promises;
const path = require("path");
// Print the CLI usage help covering all supported subcommands.
function printUsage() {
    const usage = `Usage: "ci check-consistency" or "ci bump-version %postfix%" or "ci get-version"`;
    console.log(usage);
}
let postfix;
const mode = process.argv[2];
// Validate the `mode` taken from argv[2]; for bump-version also capture the
// required postfix argument into the module-level `postfix`, exiting the
// process when it is missing. Returns false for any unknown mode.
function validateArgs() {
    if (mode === "get-version") {
        return true;
    }
    if (mode === "bump-version") {
        postfix = process.argv[3];
        if (!postfix) {
            printUsage();
            process.exit();
        }
        return true;
    }
    // No mode at all (or an explicit check-consistency) is also valid:
    // the default action is the consistency check.
    if (mode === "" || mode === undefined || mode === "check-consistency") {
        return true;
    }
    return false;
}
// Roots scanned (recursively) for package.json files to check or bump.
const PATHS_TO_PACKAGES = [
    "./api/api-npm",
    "./language-server/language-server-npm",
    "./integration-tests"
];
// Recursively collect the absolute paths of every package.json under
// currentPath, skipping anything named "node_modules" or "@tests".
// NOTE(review): the name filter also excludes a *file* literally named
// "@tests"; presumably only directories are meant — confirm.
async function getPackageJsonsRecursive(currentPath) {
    return (
        await Promise.all(
            (await fs.readdir(currentPath, { withFileTypes: true }))
                .filter(
                    (file) =>
                        // Keep directories (to descend into) and package.json files.
                        file.name !== "node_modules" && file.name !== "@tests" &&
                        (file.isDirectory() || file.name === "package.json")
                )
                .map((file) =>
                    file.isDirectory()
                        ? getPackageJsonsRecursive(
                            path.join(currentPath, file.name)
                        )
                        : Promise.resolve([
                            // Absolute path: currentPath is relative to cwd.
                            path.join(process.cwd(), currentPath, file.name),
                        ])
                )
        )
    ).flat();
}
// Read a package.json file and return its [name, version] pair
// (shaped as a Map entry for getVersionsMap).
async function getVersion(file) {
    const content = await fs.readFile(file);
    const json = JSON.parse(content);
    return [json.name, json.version];
}
// Invoke fn(depsObject, versionRange) when `name` is present (with a truthy
// value) in the given dependency map; do nothing when the map is absent or
// the package isn't listed.
function processDep(obj, name, fn) {
    const version = obj ? obj[name] : undefined;
    if (version) {
        fn(obj, version);
    }
}
// Build a Map of package name -> version from a list of package.json paths.
async function getVersionsMap(allPackageJsons) {
    return new Map(await Promise.all(allPackageJsons.map(getVersion)));
}
// Look up a package's version in the workspace map; terminate the process
// with exit code 1 when the package is unknown. (Despite the name, this
// exits rather than throws.)
function getVersionForPackageOrThrow(versionsMap, packageName) {
    const found = versionsMap.get(packageName) || null;
    if (found === null) {
        console.log("Failed to get version for package: ", packageName);
        process.exit(1);
    }
    return found;
}
// Verify that every dependency/devDependency in `file` that names a local
// workspace package pins exactly the version recorded in versionsMap.
// Exits the process with code 1 on the first mismatch.
async function checkConsistency(file, versionsMap) {
    console.log("Checking: ", file);
    const content = await fs.readFile(file);
    const json = JSON.parse(content);
    for (const [name, versionInDep] of versionsMap) {
        const check = (x, version) => {
            // Wildcard ranges ("*") are deliberately exempt from the check.
            if (version.includes("*")) {
                return;
            }
            if (versionInDep !== version) {
                console.log(
                    `Error, versions don't match: ${name}:${version} !== ${versionInDep}`,
                    file
                );
                process.exit(1);
            }
        };
        processDep(json.dependencies, name, check);
        processDep(json.devDependencies, name, check);
    }
}
// Rewrite `file` in place: append "-<postfix>" (module-level, set by
// validateArgs) to the package's own version and to every
// dependency/devDependency that names a local workspace package.
async function bumpVersions(file, versionsMap) {
    console.log("Updating: ", file);
    const content = await fs.readFile(file);
    const json = JSON.parse(content);
    for (const [name, version] of versionsMap) {
        // Overwrite the dependency range with the exact postfixed version.
        const update = (x) => (x[name] = `${version}-${postfix}`);
        processDep(json.dependencies, name, update);
        processDep(json.devDependencies, name, update);
    }
    // The package itself must be one of the known workspace packages;
    // this exits the process otherwise.
    const version = getVersionForPackageOrThrow(versionsMap, json.name);
    json.version = `${version}-${postfix}`;
    const newContent = JSON.stringify(json, undefined, 4) + "\n";
    await fs.writeFile(file, newContent);
}
// Apply the async action fn(file, versionsMap) to every package.json path,
// running all invocations concurrently and waiting for them all.
async function processPackageJsons(allPackageJsons, versionsMap, fn) {
    const pending = allPackageJsons.map((file) => fn(file, versionsMap));
    await Promise.all(pending);
}
// Entry point: validate CLI args, gather all package.json files under
// PATHS_TO_PACKAGES, then perform the requested action
// (get-version / check-consistency / bump-version).
async function run() {
    if (!validateArgs()) {
        printUsage();
        process.exit(0);
    }
    const packageJsons = (
        await Promise.all(PATHS_TO_PACKAGES.map(getPackageJsonsRecursive))
    ).flat();
    const versionsMap = await getVersionsMap(packageJsons);
    if (mode === "get-version") {
        const fjs = versionsMap.get("@fluencelabs/fluence");
        console.log(fjs);
        return;
    }
    // Consistency is always checked before (optionally) bumping versions.
    console.log("Checking versions consistency...");
    await processPackageJsons(packageJsons, versionsMap, checkConsistency);
    console.log("Versions are consistent");
    if (mode === "bump-version") {
        console.log("Adding postfix: ", postfix);
        await processPackageJsons(packageJsons, versionsMap, bumpVersions);
        console.log("Done");
    }
}
// BUGFIX: the bare `run();` discarded the promise, so any rejection (fs
// error, malformed JSON, ...) surfaced as an unhandled rejection instead of
// a clean, clearly-reported nonzero exit for CI.
run().catch((err) => {
    console.error(err);
    process.exit(1);
});

View File

@ -1,3 +0,0 @@
*.jar
*.tgz
node_modules

View File

@ -1,7 +0,0 @@
Copyright 2021 Fluence Labs
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View File

@ -1,45 +0,0 @@
## Installation
- run `sbt "cliJS/fullLinkJS"` in a root dir of a project after any code update (better to keep `sbt` running for faster compilation)
- `npm` installed required
- run `npm i` in `npm` directory
- feel free to change `@fluencelabs/fluence` version in `package.json` file, run `npm i` after changes
## Run script
Generally, use this script to run compiled aqua compiler from a repo:
```
npm run from:scalajs -- run -f "someFunction(arg1, arg2)" -i path/to/aqua --data-path path/to/args -m path/to/exports --addr /multiaddr/to/node
```
- `-f or --func` is a function to call with arguments
- `-i or --input` aqua file where function located
- `-m or --import` imports location, could be used multiple times
- `-a or --addr` multiaddress to a Fluence node
- `-p or --data-path` path to a file with JSON where arguments are located
As example, use `test/sample.aqua` with args from `test/data.json` running on `/dns4/kras-04.fluence.dev/tcp/19001/wss/p2p/12D3KooWFEwNWcHqi9rtsmDhsYcDbRUCDXH84RC4FW6UfsFWaoHi` node:
```
npm run from:scalajs -- run -f "identityArgsAndReturn(structField, stringField, numberField)" -i test/sample.aqua --data-path test/data.json --addr /dns4/kras-04.fluence.dev/tcp/19001/wss/p2p/12D3KooWFEwNWcHqi9rtsmDhsYcDbRUCDXH84RC4FW6UfsFWaoHi
```
To simplify experience you can use `./aqua-run.sh` command and change all arguments straight in this file.
## Node addresses
Different Fluence network addresses could be found here: https://github.com/fluencelabs/fluence-network-environment/blob/main/src/index.ts
## Useful flags
- `--print-air` to print resulted air
- `--no-xor` to disable xor wrapping around service calls
- `--sk secret_key` send request signed with specific secret key. Secret key could be created with `npm run from:scalajs -- create_keypair` or `aqua create_keypair` if you want to use installed aqua
- `--data "json"` use instead of `--data-path` to pass arguments through command line
- `--timeout 10000` to change timeout
- `--log-level debug/info/..` to change log level
## Builtins for `aqua run`
You can find all builtins in aqua/run-builtins/run-builtins.aqua

View File

@ -1,14 +0,0 @@
#!/bin/bash
# Run an Aqua function through the locally built (Scala.js) compiler via
# `npm run from:scalajs`. Edit the variables below, then execute.
# Function call to execute (argument names are resolved from DATAPATH's JSON).
FUNC='deploy(tsOracle)'
# Aqua source file that defines the function.
INPUT='aqua/dist.aqua'
# JSON file holding the call arguments.
DATAPATH='test/deploy.json'
# Multiaddr of the Fluence node to run against.
ADDR='/dns4/kras-04.fluence.dev/tcp/19001/wss/p2p/12D3KooWFEwNWcHqi9rtsmDhsYcDbRUCDXH84RC4FW6UfsFWaoHi'
# Optional: uncomment and set to pass an extra import directory via -m.
# IMPORT=
if [ -z "$IMPORT" ]
then
npm run from:scalajs -- run -f "$FUNC" -i "$INPUT" --data-path "$DATAPATH" --addr "$ADDR"
else
npm run from:scalajs -- run -f "$FUNC" -i "$INPUT" --data-path "$DATAPATH" --addr "$ADDR" -m "$IMPORT"
fi

View File

@ -1,96 +0,0 @@
module ServiceDist declares *
import "run-builtins/run-builtins.aqua"
import "@fluencelabs/aqua-lib/builtin.aqua"
import "@fluencelabs/aqua-ipfs/ipfs.aqua"
export deploy, remove, createService, addBlueprint
data ModuleConf:
name: string
path: string
mounted_binaries: ?[][]string
preopened_files: ?[]string
mapped_dirs: ?[][]string
envs: ?[][]string
logger_enabled: ?bool
logging_mask: ?i32
mem_pages_count: ?u32
max_heap_size: ?string
service OpC("op"):
array_length(arr: []ModuleConf) -> u32
data DeployResult:
blueprint_id: string
service_id: string
service DeployHelper("deploy_helper"):
create_result(bid: string, sid: string) -> DeployResult
data ServiceConf:
modules: []ModuleConf
const ON_PEER ?= HOST_PEER_ID
func flattenSS(input: [][]string) -> ?[][]string:
res: *[][]string
res <<- input
<- res
func flattenS(input: []string) -> ?[]string:
res: *[]string
res <<- input
<- res
func deploy(serviceName: string, serviceConf: ServiceConf) -> DeployResult:
on ON_PEER:
multiaddr <- Ipfs.get_external_api_multiaddr()
mod_hashes: *[]string
for m <- serviceConf.modules par:
-- TODO check for cache
Console.print(Op.concat_strings("Going to upload module: ", m.name))
uploadRes <- LocalIpfs.uploadFile(m.path, multiaddr)
cid = uploadRes.cid
Console.print(Op.concat_strings(Op.concat_strings("Module '", m.name), "' was uploaded"))
on ON_PEER:
hostRes <- Ipfs.get(cid)
conf <- Dist.make_module_config(m.name, m.mem_pages_count, m.max_heap_size, m.logger_enabled, m.preopened_files, m.envs, m.mapped_dirs, m.mounted_binaries, m.logging_mask)
mod_hash <- Dist.add_module_from_vault(hostRes.path, conf)
mod_hashes <<- [mod_hash, m.name]
join mod_hashes[OpC.array_length(serviceConf.modules) - 1]
-- sort hashes to the same order as was in serviceConf.modules
sorted_hashes: *string
for m <- serviceConf.modules:
for hash_name <- mod_hashes:
if m.name == hash_name[1]:
sorted_hashes <<- hash_name[0]
Console.print("Now time to make the blueprint...")
on ON_PEER:
blueprint <- Dist.make_blueprint(serviceName, sorted_hashes)
blueprint_id <- Dist.add_blueprint(blueprint)
service_id <- Srv.create(blueprint_id)
res <- DeployHelper.create_result(blueprint_id, service_id)
<- res
func remove(service_id: string):
on ON_PEER:
Srv.remove(service_id)
Console.print("Service was deleted")
func createService(blueprint_id: string) -> string:
on ON_PEER:
service_id <- Srv.create(blueprint_id)
<- service_id
func addBlueprint(bp: AddBlueprint) -> string:
on ON_PEER:
blueprint_id <- Dist.add_blueprint(bp)
<- blueprint_id

View File

@ -1,18 +0,0 @@
module IpfsExports
import "@fluencelabs/aqua-ipfs/ipfs-api.aqua"
import "run-builtins/run-builtins.aqua"
export uploadFile
const ON_PEER ?= HOST_PEER_ID
func uploadFile(path: string) -> UploadResult:
on ON_PEER:
multiaddr <- get_external_api_multiaddr(ON_PEER)
result <- LocalIpfs.uploadFile(path, multiaddr)
<- result

View File

@ -1,41 +0,0 @@
module NetworkInfo
import "@fluencelabs/aqua-lib/builtin.aqua"
export list_modules, list_interfaces_by_peer, get_interface, list_blueprints, list_services
const ON_PEER ?= HOST_PEER_ID
func list_modules() -> []Module:
on ON_PEER:
res <- Dist.list_modules()
<- res
func list_services() -> []Service:
on ON_PEER:
services <- Srv.list()
<- services
func list_interfaces_by_peer(peer: PeerId) -> []Interface:
on ON_PEER:
services <- Srv.list()
interfaces: *Interface
for srv <- services:
if srv.owner_id == peer:
interfaces <- Srv.get_interface(srv.id)
<- interfaces
func get_interface(serviceId: string) -> Interface:
on ON_PEER:
res <- Srv.get_interface(serviceId)
<- res
func get_module_interface(moduleHash: string) -> Interface:
on ON_PEER:
res <- Dist.get_module_interface(moduleHash)
<- res
func list_blueprints() -> []Blueprint:
on ON_PEER:
res <- Dist.list_blueprints()
<- res

View File

@ -1,13 +0,0 @@
import "@fluencelabs/aqua-ipfs/ipfs.aqua"
-- printing strings in console
service Console("run-console"):
print(str: string)
data UploadResult:
error: string
cid: string
size: u64
service LocalIpfs("ipfs"):
uploadFile(path: string, multiaddr: IpfsMultiaddrResult) -> UploadResult

View File

@ -1,28 +0,0 @@
module ServiceScript declares *
import "run-builtins/run-builtins.aqua"
import "@fluencelabs/aqua-lib/builtin.aqua"
import "@fluencelabs/aqua-ipfs/ipfs.aqua"
export schedule, remove, list
const ON_PEER ?= HOST_PEER_ID
func schedule(air: string, interval: ?u64) -> string:
on ON_PEER:
res <- Script.add(air, interval)
Console.print("Script was scheduled")
<- res
func remove(script_id: string):
on ON_PEER:
res <- Script.remove(script_id)
if res:
Console.print("Script was removed")
else:
Console.print("No script with such ID")
func list() -> []ScriptInfo:
on ON_PEER:
res <- Script.list()
<- res

View File

@ -1,5 +0,0 @@
#!/usr/bin/env node
"use strict";
// Stub bin for the retired `aqua-cli` name: directs users to the `aqua` command.
console.error("ERROR: use 'aqua' command!")

View File

@ -1,32 +0,0 @@
#!/usr/bin/env node
"use strict";
handleEPIPE(process.stderr)
handleEPIPE(process.stdout)
// Swallow EPIPE errors on a stdio stream (e.g. when output is piped into
// `head` and the reader closes early): on EPIPE the stream is turned into a
// no-op sink; any other error is re-emitted unless someone else listens.
function handleEPIPE(stream) {
    stream.on('error', onerror)
    function onerror(err) {
        if (err.code === 'EPIPE') {
            // Silence all further writes/reads and stop listening.
            stream._write = noopWrite
            stream._writev = noopWritev
            stream._read = noopRead
            return stream.removeListener('error', onerror)
        }
        // BUGFIX: was `EE.listenerCount(stream, 'error')`, but `EE` is never
        // defined in this file, so any non-EPIPE error threw a
        // ReferenceError. Streams are EventEmitters, so use the instance
        // method instead.
        if (stream.listenerCount('error') === 1) {
            // We are the only listener: detach and re-raise so the error
            // isn't silently swallowed.
            stream.removeListener('error', onerror)
            stream.emit('error', err)
        }
    }
}
// Replacement _write: drop the chunk and immediately signal completion.
function noopWrite(chunk, enc, cb) {
    cb();
}
// Replacement _read: push an empty string so the stream machinery keeps
// running without producing real data. Must be called with a stream `this`.
function noopRead() {
    const stream = this;
    stream.push('');
}
// Replacement _writev: drop all chunks and immediately signal completion.
function noopWritev(chunks, cb) {
    cb();
}
import "./aqua.js";

View File

@ -1,2 +0,0 @@
// It should work in scala as js.`import`.meta.url, but it doesn't compile for some reasons
// Re-exported so Scala.js code can obtain this module's URL indirectly.
export const metaUrl = import.meta.url

File diff suppressed because it is too large Load Diff

View File

@ -1,48 +0,0 @@
{
"name": "@fluencelabs/aqua",
"version": "0.11.7",
"description": "Aqua compiler",
"type": "module",
"files": [
"aqua.js",
"index.js",
"error.js",
"meta-utils.js",
"dist/*",
"aqua/*"
],
"bin": {
"aqua": "index.js",
"aqua-cli": "error.js"
},
"scripts": {
"run": "node index.js",
"from:scalajs": "cp ../cli/.js/target/scala-3.2.2/cli-opt/main.js ./aqua.js && npm run build && npm run run -- $@",
"build": "tsc"
},
"dependencies": {
"@fluencelabs/aqua-ipfs": "0.5.9",
"@fluencelabs/aqua-lib": "0.6.0",
"@fluencelabs/fluence": "0.28.0",
"@fluencelabs/fluence-network-environment": "1.0.14",
"ipfs-http-client": "50.1.2"
},
"devDependencies": {
"ts-node": "10.9.1",
"typescript": "5.1.3"
},
"repository": {
"type": "git",
"url": "git+https://github.com/fluencelabs/aqua.git"
},
"keywords": [
"aqua",
"fluence"
],
"author": "Fluence Labs",
"license": "Apache-2.0",
"bugs": {
"url": "https://github.com/fluencelabs/aqua/issues"
},
"homepage": "https://github.com/fluencelabs/aqua#readme"
}

View File

@ -1,56 +0,0 @@
## Aqua
Aqua is a new-gen language for distributed systems.
Aqua programs are executed on many peers, sequentially
or in parallel, forming a single-use coordination network.
Aqua's runtime is heterogeneous: it includes browsers, servers, devices, all involved in solving a single task.
Therefore, Aqua scripts are compiled into several targets at once, with AIR and Typescript as a default.
## aqua
The package contains a convenience `aqua` wrapper for usage in npm-based projects.
### usage
Get the latest package
```bash
npm i --save-dev @fluencelabs/aqua
```
Create a directory for the source files: `.aqua` and for compiled files: `.ts`
```
mkdir src/aqua src/compiled
```
To compile files run:
```bash
aqua -i ./src/aqua/ -o ./src/compiled
```
Alternatively the compilation script can be put into scripts section of `package.json`
```
...
"scripts": {
...
"compile": "aqua -i ./src/aqua/ -o ./src/compiled"
},
...
```
and can be started with
```
npm run compile
```
### references
- For the list of compiler options see: https://github.com/fluencelabs/aqua
- To get started writing aqua see: https://github.com/fluencelabs/aqua-playground

View File

@ -1,55 +0,0 @@
import {create, globSource} from "ipfs-http-client";
import { Multiaddr, protocols } from "multiaddr";
import { existsSync } from "fs";
type UploadResult = {
cid: string,
size: number
}
export async function uploadFile(
path: string,
multiaddrResult: any,
infoLogger: (s: string) => void,
errorLogger: (s: string) => void
): Promise<UploadResult> {
let rpcAddr;
if (multiaddrResult.success) {
rpcAddr = multiaddrResult.multiaddr;
} else {
errorLogger(
"Failed to retrieve external api multiaddr"
);
throw multiaddrResult.error;
}
let rpcMaddr = new Multiaddr(rpcAddr).decapsulateCode(
protocols.names.p2p.code
);
// HACK: `as any` is needed because ipfs-http-client forgot to add `| Multiaddr` to the `create` types
const ipfs = create(rpcMaddr as any);
infoLogger("created ipfs client to " + rpcMaddr);
await ipfs.id();
infoLogger("connected to ipfs");
if (!existsSync(path)) {
let errMsg = "File does not exist: " + path
errorLogger(
errMsg
);
throw errMsg;
}
const source: any = await globSource(path)
const file = await ipfs.add(source);
infoLogger("file uploaded");
return {
cid: file.cid.toString(),
size: file.size
};
}

View File

@ -1,35 +0,0 @@
{
"target": "12D3KooWMhVpgfQxBLkQkJed8VFNvgN4iE6MD7xCybb1ZYWW2Gtz",
"validators": [
"12D3KooWHk9BjDQBUqnavciRPhAYFvqKBe4ZiPPvde7vDaqgn5er",
"12D3KooWBUJifCTgaxAUrcM9JysqCcS4CS8tiYH5hExbdWCAoNwb",
"12D3KooWJbJFaZ3k5sNd8DjQgg3aERoKtBAnirEvPV8yp76kEXHB",
"12D3KooWCKCeqLPSgMnDjyFsJuWqREDtKNHx1JEBiwaMXhCLNTRb",
"12D3KooWKnRcsTpYx9axkJ6d69LPfpPXrkVLe96skuPTAo76LLVH",
"12D3KooWBSdm6TkqnEFrgBuSkpVE3dR1kr6952DsWQRNwJZjFZBv",
"12D3KooWGzNvhSDsgFoHwpWHAyPf1kcTYCGeRBPfznL8J6qdyu2H",
"12D3KooWF7gjXhQ4LaKj6j7ntxsPpGk34psdQicN2KNfBi9bFKXg",
"12D3KooWB9P1xmV3c7ZPpBemovbwCiRRTKd3Kq2jsVPQN4ZukDfy"
],
"timeout": 5000,
"stringField": "some string",
"numberField": 123,
"structField": {
"numField": 42,
"arrField": ["str1", "str2", "r43r34", "ferer"],
"arr2": [{
"a": "fef",
"b": [1,2,3,4],
"c": "erfer",
"d": "frefe"
},{
"b": [1,2,3,4],
"c": "erfer",
"d": "frefe"
}, {
"a": "as",
"c": "erfer",
"d": "gerrt"
}]
}
}

View File

@ -1,13 +0,0 @@
{
"serviceConf": {
"name": "ts-oracle",
"modules": [
{
"name": "ts-oracle",
"path": "./deploy/ts_oracle.wasm",
"mounted_binaries": []
}
]
}
}

View File

@ -1,44 +0,0 @@
import "@fluencelabs/aqua-lib/builtin.aqua"
-- import "run-builtins.aqua"
data StructType:
numField: u32
arrField: []string
service OpString("op"):
identity(s: string) -> string
service OpNumber("op"):
identity(n: u32) -> u32
service OpStruct("op"):
identity(st: StructType) -> StructType
noop()
func parseBug():
stream: *string
if stream[0] != "FOO":
Op.noop()
func identityArgsAndReturn (structArg: StructType, stringArg: string, numberArg: u32) -> string, u32, StructType:
on HOST_PEER_ID:
sArg <- OpString.identity(stringArg)
nArg = OpNumber.identity (numberArg) + OpNumber.identity (numberArg)
stArg <- OpStruct.identity(structArg)
-- it could be used only on init_peer_id
<- sArg, nArg, stArg
service Ssss("ss"):
foo4: u64 -> u16
func aaa(a: u64) -> u16:
res <- Ssss.foo4(a)
<- res
func bar(callback: u32 -> u32):
callback(1)
func baz():
bar(aaa)

View File

@ -1,26 +0,0 @@
{
"compilerOptions": {
"target": "esnext",
"module": "ESNext",
"lib": [
"ESNext"
],
"declaration": true,
"outDir": "dist",
"moduleResolution": "node",
"strict": true,
"esModuleInterop": true,
"noImplicitAny": false,
"strictNullChecks": false,
"skipLibCheck": true,
},
"include": ["src/**/*"],
"exclude": [
"node_modules",
"dist",
"bundle",
"src/__test__",
"src/compiled"
]
}

View File

@ -1,139 +0,0 @@
package aqua
import aqua.builder.ArgumentGetter
import aqua.js.VarJson
import aqua.parser.expr.func.CallArrowExpr
import aqua.parser.lexer.{CallArrowToken, CollectionToken, LiteralToken, VarToken}
import aqua.parser.lift.Span
import aqua.raw.value.{CollectionRaw, LiteralRaw, ValueRaw, VarRaw}
import aqua.types.*
import aqua.run.CliFunc
import cats.data.*
import cats.data.Validated.{invalid, invalidNec, invalidNel, valid, validNec, validNel}
import cats.effect.Concurrent
import cats.syntax.applicative.*
import cats.syntax.apply.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.semigroup.*
import cats.syntax.traverse.*
import cats.{~>, Id, Semigroup}
import com.monovore.decline.Opts
import fs2.io.file.{Files, Path}
import scala.collection.immutable.SortedMap
import scala.scalajs.js
import scala.scalajs.js.JSON
case class FuncWithData(func: CliFunc, getters: Map[String, VarJson])
object ArgOpts {
// Parses a function name and arguments from a string
def funcOpt: Opts[CliFunc] =
Opts
.option[String]("func", "Function to call with args", "f", "funcName(args)")
.mapValidated { str =>
CliFunc.fromString(str)
}
// Gets data from a file or from a json string
def dataFileOrStringOpt[F[_]: Files: Concurrent]
: Opts[F[ValidatedNec[String, Option[js.Dynamic]]]] =
(AppOpts.wrapWithOption(dataOpt), AppOpts.wrapWithOption(dataFromFileOpt[F])).mapN {
case (dataFromString, dataFromFile) =>
dataFromFile match {
case Some(dataFromFileF) =>
dataFromFileF.map(_.andThen(args => getData(Some(args), dataFromString)))
case None => validNec(dataFromString).pure[F]
}
}
// Creates getters based on function arguments and data, return all info
def funcWithArgsOpt[F[_]: Files: Concurrent]: Opts[F[ValidatedNec[String, FuncWithData]]] = {
(dataFileOrStringOpt[F], funcOpt).mapN { case (dataF, func) =>
dataF.map { dataV =>
dataV.andThen { data =>
VarJson.checkDataGetServices(func.args, data).map { case (argsWithTypes, getters) =>
FuncWithData(func.copy(args = argsWithTypes), getters)
}
}
}
}
}
def dataOpt: Opts[js.Dynamic] =
Opts
.option[String](
"data",
"JSON in { [argumentName]: argumentValue } format. You can call a function using these argument names",
"d",
"json"
)
.mapValidated { str =>
Validated.catchNonFatal {
JSON.parse(str)
}.leftMap(t => NonEmptyList.one("Data argument isn't a valid JSON: " + t.getMessage))
}
def dataFromFileOpt[F[_]: Files: Concurrent]: Opts[F[ValidatedNec[String, js.Dynamic]]] = {
jsonFromFileOpt(
"data-path",
"Path to a JSON file in { [argumentName]: argumentValue } format. You can call a function using these argument names",
"p"
)
}
def jsonFromFileOpt[F[_]: Files: Concurrent](
name: String,
help: String,
short: String
): Opts[F[ValidatedNec[String, js.Dynamic]]] = {
FileOpts.fileOpt(
name,
help,
short,
(path, str) => {
Validated.catchNonFatal {
JSON.parse(str)
}.leftMap(t =>
NonEmptyChain
.one(s"Data in ${path.toString} isn't a valid JSON: " + t.getMessage)
)
}
)
}
def jsonFromFileOpts[F[_]: Files: Concurrent](
name: String,
help: String,
short: String
): Opts[F[ValidatedNec[String, NonEmptyList[(Path, js.Dynamic)]]]] = {
FileOpts.fileOpts(
name,
help,
short,
(path, str) => {
Validated.catchNonFatal {
JSON.parse(str)
}.leftMap(t =>
NonEmptyChain
.one(s"Data in ${path.toString} isn't a valid JSON: " + t.getMessage)
)
}
)
}
// get data from sources, error if both sources exist
def getData(
dataFromArgument: Option[js.Dynamic],
dataFromFile: Option[js.Dynamic]
): ValidatedNec[String, Option[js.Dynamic]] = {
(dataFromArgument, dataFromFile) match {
case (Some(_), Some(_)) =>
// TODO: maybe allow to use both and simple merge with data argument having higher priority
invalidNec("Please use either --data or --data-path. Don't use both")
case _ => validNec(dataFromArgument.orElse(dataFromFile))
}
}
}

View File

@ -1,122 +0,0 @@
package aqua
import aqua.builder.{ArgumentGetter, Service}
import aqua.io.{AquaPath, PackagePath}
import aqua.js.VarJson
import aqua.raw.value.{ValueRaw, VarRaw}
import aqua.run.{CliFunc, GeneralOptions, GeneralOpts, JsonService, RunCommand, RunOpts}
import aqua.logging.LogFormatter
import cats.data.Validated.{invalid, invalidNec, valid, validNec, validNel}
import cats.data.{NonEmptyList, Validated, ValidatedNec}
import cats.effect.ExitCode
import cats.effect.kernel.Async
import cats.syntax.applicative.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.monad.*
import cats.{Applicative, Monad}
import com.monovore.decline.{Command, Opts}
import fs2.io.file.{Files, Path}
import scribe.Logging
import scalajs.js
import scala.concurrent.ExecutionContext
// All info to run any aqua function
case class RunInfo(
common: GeneralOptions,
func: CliFunc,
input: Option[AquaPath],
imports: List[Path] = Nil,
argumentGetters: Map[String, VarJson] = Map.empty,
services: List[Service] = Nil,
jsonServices: List[JsonService] = Nil,
pluginsPaths: List[String] = Nil
)
// Builds subcommand
class SubCommandBuilder[F[_]: Async](
name: String,
header: String,
opts: Opts[F[ValidatedNec[String, RunInfo]]]
) extends Logging {
def command: Command[F[ValidatedNec[String, Unit]]] = Command(name, header) {
opts.map { riF =>
riF.flatMap {
case Validated.Valid(ri) =>
LogFormatter.initLogger(Some(ri.common.logLevel.compiler))
RunCommand.execRun(
ri
)
case i @ Validated.Invalid(_) =>
i.pure[F]
}
}
}
}
object SubCommandBuilder {
def apply[F[_]: Async](
name: String,
header: String,
opts: Opts[ValidatedNec[String, RunInfo]]
): SubCommandBuilder[F] = {
new SubCommandBuilder(name, header, opts.map(_.pure[F]))
}
def applyF[F[_]: Async](
name: String,
header: String,
opts: Opts[F[ValidatedNec[String, RunInfo]]]
): SubCommandBuilder[F] = {
new SubCommandBuilder(name, header, opts)
}
def valid[F[_]: Async](
name: String,
header: String,
opts: Opts[RunInfo]
): SubCommandBuilder[F] = {
SubCommandBuilder(name, header, opts.map(ri => validNec[String, RunInfo](ri)))
}
def simple[F[_]: Async](
name: String,
header: String,
path: AquaPath,
funcName: String
): SubCommandBuilder[F] =
SubCommandBuilder
.valid(
name,
header,
GeneralOpts.opt.map { c =>
RunInfo(c, CliFunc(funcName), Some(path))
}
)
def subcommands[F[_]: Async](
subs: NonEmptyList[SubCommandBuilder[F]]
): Opts[F[ValidatedNec[String, Unit]]] =
Opts.subcommands(subs.head.command, subs.tail.map(_.command): _*)
}
// Builds top command with subcommands
case class CommandBuilder[F[_]: Async](
name: String,
header: String,
subcommands: NonEmptyList[SubCommandBuilder[F]],
rawCommands: List[Command[F[ValidatedNec[String, Unit]]]] = Nil
) {
def command: Command[F[ValidatedNec[String, Unit]]] = {
Command(name = name, header = header) {
Opts.subcommands(
subcommands.head.command,
(subcommands.tail.map(_.command) ++ rawCommands): _*
)
}
}
}

View File

@ -1,34 +0,0 @@
package aqua
import aqua.js.{LogLevel, FluenceJSLogLevel}
import fs2.io.file.Path
import scribe.Level
import scala.util.Try
// Translates scribe log levels into the string level names consumed by
// AVM and by Fluence JS (each has its own vocabulary for "disabled").
object LogLevelTransformer {

  // scribe Level -> AVM log-level string. Fatal maps to "off" (AVM has no
  // fatal level); any other/unknown level falls back to "info".
  def logLevelToAvm(logLevel: Level): LogLevel = {
    logLevel match {
      case Level.Trace => "trace"
      case Level.Debug => "debug"
      case Level.Info => "info"
      case Level.Warn => "warn"
      case Level.Error => "error"
      case Level.Fatal => "off"
      case _ => "info"
    }
  }

  // scribe Level -> Fluence JS log-level string. Fatal maps to "silent";
  // any other/unknown level falls back to "info".
  def logLevelToFluenceJS(logLevel: Level): FluenceJSLogLevel = {
    logLevel match {
      case Level.Trace => "trace"
      case Level.Debug => "debug"
      case Level.Info => "info"
      case Level.Warn => "warn"
      case Level.Error => "error"
      case Level.Fatal => "silent"
      case _ => "info"
    }
  }
}

View File

@ -1,35 +0,0 @@
package aqua
import aqua.config.ConfigOpts
import aqua.ipfs.IpfsOpts
import aqua.keypair.KeyPairOpts
import aqua.remote.{DistOpts, RemoteOpts}
import aqua.run.RunOpts
import aqua.script.ScriptOpts
import cats.data.ValidatedNec
import cats.effect.ExitCode
import cats.effect.kernel.Async
import cats.syntax.applicative.*
import cats.syntax.apply.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.monad.*
import com.monovore.decline.Opts
import fs2.io.file.{Files, Path}
import scribe.Logging
import scala.concurrent.ExecutionContext
import scala.util.Try
import cats.effect.std.Console
// JS-specific options and subcommands
// JS-specific options and subcommands
// Aggregates every JS-platform CLI subcommand (run, keypair, ipfs, script,
// remote, config) into a single decline Opts via orElse chaining.
object PlatformOpts extends Logging {

  // Subcommands are combined with orElse: the first one matching the parsed
  // command line wins.
  def opts[F[_]: Files: AquaIO: Async: Console]: Opts[F[ValidatedNec[String, Unit]]] =
    Opts.subcommand(RunOpts.runCommand[F]) orElse
      Opts.subcommand(KeyPairOpts.command[F]) orElse
      Opts.subcommand(IpfsOpts.ipfsOpt[F]) orElse
      Opts.subcommand(ScriptOpts.scriptOpt[F]) orElse
      Opts.subcommand(RemoteOpts.commands[F]) orElse
      Opts.subcommand(ConfigOpts.command[F])
}

View File

@ -1,47 +0,0 @@
package aqua.air
import aqua.backend.AirFunction
import aqua.js.Fluence
import cats.data.Validated.{invalid, validNec}
import cats.data.{Chain, NonEmptyChain, ValidatedNec}
import cats.effect.Async
import cats.syntax.traverse.*
import cats.syntax.functor.*
import cats.syntax.applicative.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import scala.concurrent.ExecutionContext
import scala.scalajs.js
// Validates generated AIR code by parsing it with the Fluence JS peer's
// built-in AST parser.
object AirValidation {

  // HACK: memoize doesn't work in scalajs, so, call this function once before `validate`
  // Starts the Fluence JS peer that `validate` later uses for parsing.
  def init[F[_]: Async](): F[Unit] = {
    Async[F].fromFuture(Fluence.start(js.undefined).toFuture.pure[F]).as(())
  }

  // Parses every AIR function and accumulates one human-readable error per
  // function whose AIR fails to parse; valid when all parse successfully.
  // `init` must have completed beforehand (see HACK note above).
  def validate[F[_]: Async](
    airs: List[AirFunction]
  ): F[ValidatedNec[String, Unit]] =
    Async[F].fromFuture {
      Async[F].executionContext.map { implicit ec =>
        for {
          // Pair each function name with its parse status.
          statuses <- airs
            .map(a => Fluence.getPeer().internals.parseAst(a.air).toFuture.map(s => (a.name, s)))
            .sequence
        } yield {
          // Keep only the failed parses; None means everything validated.
          val errors = NonEmptyChain.fromSeq(statuses.filterNot(_._2.success))
          errors.map { errs =>
            val errorsStrs = errs.map { case (fName, status) =>
              s"Cannot compile AIR for '$fName' function: ${js.JSON.stringify(status.data)}\n\n" +
                "This is unexpected error. Please, dump your Aqua code and make an issue here https://github.com/fluencelabs/aqua/issues."
            }
            invalid(errorsStrs)
          }.getOrElse(validNec(()))
        }
      }
    }
}

View File

@ -1,49 +0,0 @@
package aqua.builder
import aqua.backend.*
import aqua.js.{CallJsFunction, FluencePeer, ServiceHandler}
import aqua.model.{LiteralModel, VarModel}
import aqua.raw.ops
import aqua.raw.ops.{Call, CallArrowRawTag}
import aqua.raw.value.{LiteralRaw, VarRaw}
import aqua.definitions.*
import cats.data.NonEmptyList
import scala.concurrent.Promise
import scala.scalajs.js
// Service that can return argument to use it from a code
// TODO: create one service with multiple argument getters instead of service per argument
abstract class ArgumentGetter(
serviceId: String,
val function: GetFunction
) extends Service(serviceId, NonEmptyList.one(function)) {
def callTag(): CallArrowRawTag
}
case class GetFunction(value: VarRaw, arg: scalajs.js.Dynamic) extends AquaFunction {
override def fnName: String = value.name
def handler: ServiceHandler = _ => js.Promise.resolve(arg)
def arrow: ArrowTypeDef = ArrowTypeDef(NilTypeDef, UnlabeledProductTypeDef(TopTypeDef :: Nil))
}
object ArgumentGetter {
val ServiceId = "getDataSrv"
private def getFunction(value: VarRaw, arg: scalajs.js.Dynamic) = GetFunction(value, arg)
def apply(value: VarRaw, arg: scalajs.js.Dynamic): ArgumentGetter =
new ArgumentGetter(ServiceId, getFunction(value, arg)) {
override def callTag(): CallArrowRawTag =
CallArrowRawTag.service(
LiteralRaw.quote(ServiceId),
value.name,
Call(List.empty, List(Call.Export(value.name, value.baseType)))
)
}
}

View File

@ -1,42 +0,0 @@
package aqua.builder
import aqua.backend.*
import aqua.io.OutputPrinter
import aqua.js.{CallJsFunction, FluencePeer, ServiceHandler}
import aqua.types.ScalarType
import aqua.definitions.*
import cats.data.NonEmptyList
import scribe.Logging
import scala.scalajs.js
import scala.scalajs.js.JSON
// Console service: thin Service wrapper; constructed only via the companion's apply.
private case class Console(serviceId: String, functions: NonEmptyList[AquaFunction])
    extends Service(serviceId, functions)
object Console extends Logging {

  // Builds the `print` function: primitives are printed via toString,
  // everything else is pretty-printed as JSON (2-space indent).
  private def printFunction(funcName: String) = new AquaFunction {
    override def fnName: String = funcName

    def handler: ServiceHandler = { varArgs =>
      // js.typeOf distinguishes JS primitives from objects/arrays
      js.typeOf(varArgs(0)) match {
        case "string" | "number" | "boolean" => println(varArgs(0).toString)
        case _ => println(JSON.stringify(varArgs(0), space = 2))
      }
      // service returns an empty object (no meaningful result)
      js.Promise.resolve(Service.emptyObject)
    }

    // One labeled string argument, no result.
    def arrow: ArrowTypeDef = ArrowTypeDef(
      LabeledProductTypeDef(("str", ScalarTypeDef.fromScalar(ScalarType.string)) :: Nil),
      NilTypeDef
    )
  }

  val PrintName = "print"

  /** Console service exposing a single `print` function under `serviceId`. */
  def apply(serviceId: String = "run-console"): Console = {
    Console(serviceId, NonEmptyList.one(printFunction(PrintName)))
  }
}

View File

@ -1,49 +0,0 @@
package aqua.builder
import aqua.backend.*
import aqua.ipfs.js.IpfsApi
import aqua.js.{CallJsFunction, FluencePeer, ServiceHandler}
import aqua.types.ScalarType
import aqua.definitions.*
import cats.data.NonEmptyList
import scribe.Logging
import scala.scalajs.js
// Helper service used by deploy flow to pack (blueprint_id, service_id)
// into a single DeployResult structure on the JS side.
object DeployHelper extends Logging {

  private val CreateResult = "create_result"

  // create_result(bid, sid) -> { blueprint_id: bid, service_id: sid }
  private def createResult(funcName: String): AquaFunction = new AquaFunction {
    override def fnName: String = funcName

    override def handler: ServiceHandler = args => {
      val bid = args(0)
      val sid = args(1)
      js.Promise.resolve(js.Dynamic.literal(blueprint_id = bid, service_id = sid))
    }

    // (bid: string, sid: string) -> DeployResult
    def arrow: ArrowTypeDef = ArrowTypeDef(
      LabeledProductTypeDef(
        ("bid", ScalarTypeDef.fromScalar(ScalarType.string)) :: (
          "sid",
          ScalarTypeDef.fromScalar(ScalarType.string)
        ) :: Nil
      ),
      UnlabeledProductTypeDef(
        StructTypeDef(
          "DeployResult",
          Map(
            "blueprint_id" -> ScalarTypeDef.fromScalar(ScalarType.string),
            "service_id" -> ScalarTypeDef.fromScalar(ScalarType.string)
          )
        ) :: Nil
      )
    )
  }

  /** Service with the single `create_result` function. */
  def apply(serviceId: String = "deploy_helper"): Service = {
    val funcs = NonEmptyList.one(createResult(CreateResult))
    Service(serviceId, funcs)
  }
}

View File

@ -1,49 +0,0 @@
package aqua.builder
import aqua.backend.*
import aqua.js.{CallJsFunction, FluencePeer, ServiceHandler}
import aqua.model.{LiteralModel, VarModel}
import aqua.raw.ops.{Call, CallArrowRawTag}
import aqua.definitions.*
import aqua.raw.value.LiteralRaw
import cats.data.NonEmptyList
import scala.concurrent.Promise
import scala.scalajs.js
import scala.scalajs.js.{Dynamic, JSON}
// Will finish promise on service call.
// Used to signal completion of an async Aqua function from the peer side:
// awaiting `promise.future` completes once the peer calls this service.
abstract class Finisher private (
  serviceId: String,
  functions: NonEmptyList[AquaFunction],
  val promise: Promise[Unit]
) extends Service(serviceId, functions) {

  // Tag to insert the finishing call into generated Aqua code.
  def callTag(): CallArrowRawTag
}
object Finisher {

  // Function that completes `promise` when called; takes no args, returns nothing.
  private def finishFunction(funcName: String, promise: Promise[Unit]) = new AquaFunction {
    def fnName: String = funcName

    def handler: ServiceHandler = _ => {
      promise.success(())
      js.Promise.resolve(Service.emptyObject)
    }

    def arrow: ArrowTypeDef = ArrowTypeDef(NilTypeDef, NilTypeDef)
  }

  /**
   * Creates a Finisher with a fresh Promise; `callTag()` generates a
   * `servId.fnName()` call with no arguments and no exports.
   */
  def apply(servId: String, fnName: String): Finisher = {
    val promise = Promise[Unit]()
    val funcs = NonEmptyList.one(finishFunction(fnName, promise))
    new Finisher(servId, funcs, promise) {
      def callTag(): CallArrowRawTag =
        CallArrowRawTag.service(
          LiteralRaw.quote(servId),
          fnName,
          Call(Nil, Nil)
        )
    }
  }
}

View File

@ -1,56 +0,0 @@
package aqua.builder
import aqua.backend.*
import aqua.ipfs.js.IpfsApi
import aqua.js.{CallJsFunction, FluencePeer, ServiceHandler}
import aqua.types.ScalarType
import aqua.definitions.*
import cats.data.NonEmptyList
import scribe.Logging
import scala.scalajs.js
// Service wrapping the JS IPFS upload API (see aqua.ipfs.js.IpfsApi).
object IPFSUploader extends Logging {

  private val UploadFile = "uploadFile"

  // uploadFile(path, multiaddr) -> UploadResult; upload errors are caught
  // and converted into a result object with only the `error` field set.
  private def uploadFunc(funcName: String): AquaFunction = new AquaFunction {
    override def fnName: String = funcName

    private def logError(s: String) = logger.error(s)
    private def logInfo(s: String) = logger.info(s)

    override def handler: ServiceHandler = args => {
      IpfsApi
        .uploadFile(args(0), args(1), logInfo, logError)
        .`catch` { err =>
          // do not fail the particle: surface the error in the result struct
          js.Dynamic.literal(error = "File upload error: " + err)
        }
    }

    // (path: string, multiaddr: string) -> UploadResult{error, cid, size}
    def arrow: ArrowTypeDef = ArrowTypeDef(
      LabeledProductTypeDef(
        ("path", ScalarTypeDef.fromScalar(ScalarType.string)) :: (
          "multiaddr",
          ScalarTypeDef.fromScalar(ScalarType.string)
        ) :: Nil
      ),
      UnlabeledProductTypeDef(
        StructTypeDef(
          "UploadResult",
          Map(
            "error" -> ScalarTypeDef.fromScalar(ScalarType.string),
            "cid" -> ScalarTypeDef.fromScalar(ScalarType.string),
            "size" -> ScalarTypeDef.fromScalar(ScalarType.u64)
          )
        ) :: Nil
      )
    )
  }

  /** Service exposing the single `uploadFile` function under `serviceId`. */
  def apply(serviceId: String): Service = {
    val funcs = NonEmptyList.one(uploadFunc(UploadFile))
    Service(serviceId, funcs)
  }
}

View File

@ -1,60 +0,0 @@
package aqua.builder
import aqua.backend.*
import aqua.io.OutputPrinter
import aqua.js.{CallJsFunction, FluencePeer, ServiceHandler}
import aqua.raw.ops.{Call, CallArrowRawTag}
import aqua.raw.value.{LiteralRaw, VarRaw}
import aqua.definitions.*
import aqua.types.ScalarType
import cats.data.NonEmptyList
import scala.scalajs.js
import scala.scalajs.js.{Dynamic, JSON}
// Function to print any variables that passed as arguments.
// Used to print the results of an Aqua function call to the console.
abstract class ResultPrinter(serviceId: String, functions: NonEmptyList[AquaFunction])
    extends Service(serviceId, functions) {

  // Tag that routes the given result variables into this printer service.
  def callTag(variables: List[VarRaw]): CallArrowRawTag
}
object ResultPrinter {

  // Prints call results: a single result is printed as pretty JSON,
  // several results are wrapped in a JSON-array-like listing.
  private def resultPrinterFunc(funcName: String, resultNames: List[String]) = new AquaFunction {
    override def fnName: String = funcName

    override def handler: ServiceHandler = varArgs => {
      // drop last argument (tetraplets)
      val args: Seq[js.Any] = varArgs.init
      val toPrint = args.toList match {
        case arg :: Nil => JSON.stringify(arg, space = 2)
        case _ => args.map(a => JSON.stringify(a, space = 2)).mkString("[\n", ",\n", "\n]")
      }
      // if an input function returns a result, our success will be after it is printed
      // otherwise finish after JS SDK will finish sending a request
      OutputPrinter.print(toPrint)
      // empty JS object
      js.Promise.resolve(Service.emptyObject)
    }

    // One labeled Top-typed argument per result name; no return value.
    def arrow: ArrowTypeDef = ArrowTypeDef(
      LabeledProductTypeDef(resultNames.map(n => (n, TopTypeDef))),
      NilTypeDef
    )
  }

  /**
   * Creates a printer service; `callTag` generates a call passing the given
   * variables into the printer with no exports.
   */
  def apply(serviceId: String, fnName: String, resultNames: List[String]): ResultPrinter = {
    val funcs = NonEmptyList.one(resultPrinterFunc(fnName, resultNames))
    new ResultPrinter(serviceId, funcs) {
      def callTag(variables: List[VarRaw]): CallArrowRawTag =
        CallArrowRawTag.service(
          LiteralRaw.quote(serviceId),
          fnName,
          Call(variables, Nil)
        )
    }
  }
}

View File

@ -1,45 +0,0 @@
package aqua.builder
import aqua.backend.*
import aqua.js.{CallJsFunction, FluencePeer, ServiceHandler}
import aqua.definitions.*
import cats.data.NonEmptyList
import scribe.Logging
import scala.scalajs.js
import scala.scalajs.js.{Dynamic, JSON}
/**
 * A local service: a named set of functions that can be registered
 * on a FluencePeer so generated AIR code may call them.
 */
class Service(serviceId: String, functions: NonEmptyList[AquaFunction]) extends Logging {

  /** Registers all functions of this service (handlers + type defs) on the peer. */
  def register(peer: FluencePeer): Unit = {
    val handlers = functions.map(f => (f.fnName, f.handler))
    // type definitions for every function, used by the JS SDK for marshalling
    val defs = LabeledProductTypeDef(
      functions.map(f => (f.fnName, f.arrow)).toList
    )

    logger.debug(
      s"Registering service $serviceId with functions ${functions.map(_.fnName).toList.mkString(",")}"
    )

    CallJsFunction.registerService(
      peer,
      serviceId,
      handlers.toList,
      ServiceDef(
        None,
        defs,
        ""
      )
    )
  }
}
// One callable function of a Service: its name, JS handler and arrow type.
trait AquaFunction {
  def fnName: String
  def handler: ServiceHandler
  def arrow: ArrowTypeDef
}
object Service {
  // Shared empty JS object returned by handlers that produce no result.
  val emptyObject: Dynamic = Dynamic.literal()
}

View File

@ -1,54 +0,0 @@
package aqua.config
import aqua.js.{FluenceEnvironment, FluenceNode}
import cats.Applicative
import cats.data.{Validated, ValidatedNec}
import cats.data.Validated.{invalidNel, validNel}
import cats.effect.ExitCode
import cats.effect.kernel.Async
import cats.syntax.applicative.*
import com.monovore.decline.{Command, Opts}
import cats.data.Validated.{invalidNec, validNec}
import scala.scalajs.js
// `aqua config` subcommands (currently only listing default network peers).
object ConfigOpts {

  /** Top-level `config` command with its subcommands. */
  def command[F[_]: Async]: Command[F[ValidatedNec[String, Unit]]] =
    Command(name = "config", header = "Aqua CLI configuration") {
      Opts.subcommands(
        listPeers
      )
    }

  // Known Fluence network environment names
  val Krasnodar = "krasnodar"
  val Stage = "stage"
  val TestNet = "testnet"

  // Positional environment argument; defaults to krasnodar.
  def envArg: Opts[js.Array[FluenceNode]] =
    Opts
      .argument[String](s"$Krasnodar | $Stage | $TestNet")
      .withDefault(Krasnodar)
      .mapValidated {
        case Krasnodar =>
          validNel(FluenceEnvironment.krasnodar)
        case TestNet =>
          validNel(FluenceEnvironment.testnet)
        case Stage =>
          validNel(FluenceEnvironment.stage)
        case e =>
          invalidNel(
            s"There is no environment '$e' in our list. Use one of these: '$Krasnodar', '$TestNet', '$Stage'"
          )
      }

  /** `config default_peers`: prints multiaddrs of the chosen environment, one per line. */
  def listPeers[F[_]: Applicative]: Command[F[ValidatedNec[String, Unit]]] =
    Command(
      name = "default_peers",
      header = "List addresses of default peers in Fluence network"
    ) {
      envArg.map { env =>
        validNec(println(env.toList.map(n => n.multiaddr).mkString("\n"))).pure[F]
      }
    }
}

View File

@ -1,64 +0,0 @@
package aqua.ipfs
import aqua.{
AppOpts,
AquaIO,
CommandBuilder,
FluenceOpts,
LogLevelTransformer,
PlatformOpts,
RunInfo,
SubCommandBuilder
}
import aqua.keypair.KeyPairShow.show
import cats.data.{NonEmptyChain, NonEmptyList, Validated, ValidatedNec, ValidatedNel}
import Validated.{invalid, invalidNec, valid, validNec, validNel}
import aqua.builder.IPFSUploader
import aqua.io.PackagePath
import aqua.ipfs.js.IpfsApi
import aqua.model.LiteralModel
import aqua.raw.value.LiteralRaw
import aqua.run.{GeneralOptions, RunCommand, RunConfig, RunOpts, GeneralOpts, CliFunc}
import cats.effect.{Concurrent, ExitCode, Resource, Sync}
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.applicative.*
import cats.syntax.apply.*
import cats.effect.kernel.Async
import cats.syntax.show.*
import cats.{Applicative, Monad}
import com.monovore.decline.{Command, Opts}
import fs2.io.file.{Files, Path}
import scribe.Logging
import scala.concurrent.{ExecutionContext, Future}
import scala.scalajs.js
// Options and commands to work with IPFS
object IpfsOpts extends Logging {

  // Packaged Aqua script implementing the IPFS functions
  val IpfsAqua = "aqua/ipfs.aqua"

  val UploadFuncName = "uploadFile"

  // --path/-p: file to upload
  def pathOpt: Opts[String] =
    Opts
      .option[String]("path", "Path to a file", "p", "path")

  /** Top-level `ipfs` command grouping all IPFS subcommands. */
  def ipfsOpt[F[_]: Async]: Command[F[ValidatedNec[String, Unit]]] =
    CommandBuilder("ipfs", "Work with IPFS on a peer", NonEmptyList.one(upload[F])).command

  // Uploads a file to IPFS
  def upload[F[_]: Async]: SubCommandBuilder[F] =
    SubCommandBuilder.valid(
      "upload",
      "Upload a file to IPFS",
      (GeneralOpts.opt, pathOpt).mapN { (common, path) =>
        RunInfo(
          common,
          // runs uploadFile(<path>) from the packaged ipfs.aqua
          CliFunc(UploadFuncName, LiteralRaw.quote(path) :: Nil),
          Option(PackagePath(IpfsAqua))
        )
      }
    )
}

View File

@ -1,16 +0,0 @@
package aqua.ipfs.js
import scala.scalajs.js
import scala.scalajs.js.annotation.{JSExportAll, JSImport}
// Scala.js facade for the bundled JS IPFS helper (dist/ipfs.js).
object IpfsApi {

  /**
   * Uploads a file to IPFS via the relay at `multiaddrResult`.
   * `infoLogger`/`errorLogger` are callbacks invoked by the JS side.
   * Resolves to a result object; shape is defined by dist/ipfs.js —
   * presumably matches IPFSUploader's UploadResult (error/cid/size).
   */
  @js.native
  @JSImport("./dist/ipfs.js", "uploadFile")
  def uploadFile(
    path: js.Any,
    multiaddrResult: js.Any,
    infoLogger: js.Any,
    errorLogger: js.Any
  ): js.Promise[js.Dynamic] = js.native
}

View File

@ -1,43 +0,0 @@
package aqua.keypair
import aqua.io.OutputPrinter
import aqua.js.KeyPair
import aqua.keypair.KeyPairShow.show
import cats.data.ValidatedNec
import cats.effect.ExitCode
import cats.effect.kernel.Async
import cats.syntax.applicative.*
import cats.syntax.functor.*
import cats.syntax.show.*
import cats.{Applicative, Monad}
import com.monovore.decline.{Command, Opts}
import cats.data.Validated.{invalidNec, validNec}
import scribe.Logging
import scala.concurrent.{ExecutionContext, Future}
// Options and commands to work with KeyPairs
object KeyPairOpts extends Logging {

  /** Top-level `key` command with its subcommands. */
  def command[F[_]: Async]: Command[F[ValidatedNec[String, Unit]]] =
    Command(name = "key", header = "Manage local keys and identity") {
      Opts.subcommands(
        createKeypair
      )
    }

  // KeyPair generation
  def createKeypair[F[_]: Async]: Command[F[ValidatedNec[String, Unit]]] =
    Command(
      name = "create",
      header = "Generate new key pair"
    ) {
      Opts.unit.map(_ =>
        Async[F]
          .fromFuture(
            // randomEd25519 returns a JS Promise; lift it into F
            KeyPair.randomEd25519().toFuture.pure[F]
          )
          // print via Show instance (see KeyPairShow), always succeeds
          .map(keypair => validNec(OutputPrinter.print(s"${keypair.show}")))
      )
    }
}

View File

@ -1,17 +0,0 @@
package aqua.keypair
import aqua.js.{KeyPair, KeyPairOp}
import cats.Show
import java.util.Base64
import scala.scalajs.js
import scala.scalajs.js.JSON
object KeyPairShow {

  /** Renders a KeyPair as pretty-printed JSON (4-space indent). */
  def stringify(keypair: KeyPair): String =
    JSON.stringify(KeyPairOp.toDynamicJSON(keypair), space = 4)

  // cats Show instance backed by stringify
  implicit val show: Show[KeyPair] = Show.show(stringify)
}

View File

@ -1,171 +0,0 @@
package aqua.remote
import aqua.ArgOpts.jsonFromFileOpt
import aqua.builder.ArgumentGetter
import aqua.raw.value.{LiteralRaw, VarRaw}
import aqua.run.{GeneralOptions, GeneralOpts, CliFunc}
import aqua.types.{ArrayType, ScalarType, StructType}
import aqua.*
import aqua.io.PackagePath
import aqua.js.{JsonEncoder, VarJson}
import cats.data.{NonEmptyList, NonEmptyMap, ValidatedNec}
import cats.data.Validated.{invalidNec, validNec}
import cats.effect.{Async, Concurrent, ExitCode, Resource, Sync}
import cats.syntax.applicative.*
import cats.syntax.apply.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.traverse.*
import cats.syntax.show.*
import cats.{Applicative, Monad}
import com.monovore.decline.Opts
import fs2.io.file.Files
import scribe.Logging
import scala.collection.immutable.SortedMap
import scala.scalajs.js.JSConverters.*
import scala.scalajs.js
// Options and commands to work blueprints, modules and services
object DistOpts extends Logging {

  // Packaged Aqua script implementing the dist functions
  val DistAqua = "aqua/dist.aqua"

  // Function names inside dist.aqua
  val DeployFuncName = "deploy"
  val RemoveFuncName = "remove"
  val CreateServiceFuncName = "createService"
  val AddBlueprintFuncName = "addBlueprint"

  def srvNameOpt: Opts[String] =
    Opts
      .option[String]("service", "Service to deploy from the config file")

  def srvIdOpt: Opts[String] =
    Opts
      .option[String]("id", "Service id to remove", "i")

  def blueprintIdOpt: Opts[String] =
    Opts
      .option[String]("id", "Blueprint id", "i")

  def blueprintNameOpt: Opts[String] =
    Opts
      .option[String]("name", "Blueprint name", "n")

  def dependencyOpt: Opts[NonEmptyList[String]] =
    Opts
      .options[String]("dependency", "Blueprint dependency. May be used several times", "d")

  // Removes service from a node
  def remove[F[_]: Async]: SubCommandBuilder[F] =
    SubCommandBuilder.valid(
      "remove_service",
      "Remove service",
      (GeneralOpts.opt, srvIdOpt).mapN { (common, srvId) =>
        RunInfo(
          common,
          CliFunc(RemoveFuncName, LiteralRaw.quote(srvId) :: Nil),
          Option(PackagePath(DistAqua))
        )
      }
    )

  // Deploys a service instance from an already-uploaded blueprint
  def createService[F[_]: Async]: SubCommandBuilder[F] =
    SubCommandBuilder.valid(
      "create_service",
      "Deploy service from existing blueprint",
      (GeneralOpts.opt, blueprintIdOpt).mapN { (common, blueprintId) =>
        RunInfo(
          common,
          CliFunc(CreateServiceFuncName, LiteralRaw.quote(blueprintId) :: Nil),
          Option(PackagePath(DistAqua))
        )
      }
    )

  // Registers a blueprint (name + module dependencies) on a peer
  def addBlueprint[F[_]: Async]: SubCommandBuilder[F] =
    SubCommandBuilder.valid(
      "add_blueprint",
      "Add blueprint to a peer",
      (GeneralOpts.opt, blueprintNameOpt, dependencyOpt).mapN {
        (common, blueprintName, dependencies) =>
          // dependencies must carry a "hash:" prefix; add it when missing
          val depsWithHash = dependencies.map { d =>
            if (d.startsWith("hash:"))
              d
            else
              "hash:" + d
          }
          // structure expected by addBlueprint in dist.aqua
          val addBlueprintType = StructType(
            "AddBlueprint",
            NonEmptyMap.of(
              ("name", ScalarType.string),
              ("dependencies", ArrayType(ScalarType.string))
            )
          )
          val addBlueprintRequestVar =
            VarRaw("addBlueprint", addBlueprintType)
          RunInfo(
            common,
            CliFunc(AddBlueprintFuncName, addBlueprintRequestVar :: Nil),
            Option(PackagePath(DistAqua)),
            Nil,
            // bind the variable to a concrete JS object built from CLI options
            Map(
              addBlueprintRequestVar.name -> VarJson(
                addBlueprintRequestVar,
                js.Dynamic
                  .literal("name" -> blueprintName, "dependencies" -> depsWithHash.toList.toJSArray)
              )
            )
          )
      }
    )

  // --config-path/-p: JSON deploy config describing services
  def configFromFileOpt[F[_]: Files: Concurrent]: Opts[F[ValidatedNec[String, js.Dynamic]]] = {
    jsonFromFileOpt("config-path", "Path to a deploy config", "p")
  }

  // Uploads a file to IPFS, creates blueprints and deploys a service
  def deploy[F[_]: Async]: SubCommandBuilder[F] =
    SubCommandBuilder.applyF(
      "deploy_service",
      "Deploy service from WASM modules",
      (
        GeneralOpts.optWithSecretKeyCustomTimeout(60000),
        configFromFileOpt[F],
        srvNameOpt
      ).mapN { (common, configFromFileF, srvName) =>
        configFromFileF.map { dff =>
          dff
            .andThen(config =>
              // look up the requested service section in the config object
              val srvConfig = {
                val c = config.selectDynamic(srvName)
                if (js.isUndefined(c)) None
                else Some(c)
              }
              srvConfig match {
                case Some(c) =>
                  // derive the Aqua type of the config section from its JSON shape
                  JsonEncoder.aquaTypeFromJson(srvName, c).andThen { configType =>
                    val srvArg = VarRaw(srvName, configType)
                    val args = LiteralRaw.quote(srvName) :: srvArg :: Nil
                    // if we have default timeout, increase it
                    validNec(
                      RunInfo(
                        common,
                        CliFunc(DeployFuncName, args),
                        Option(PackagePath(DistAqua)),
                        Nil,
                        // hack: air cannot use undefined fields, fill undefined arrays with nils
                        Map(srvName -> VarJson(srvArg, c))
                      )
                    )
                  }
                case None =>
                  invalidNec(s"No service '$srvName' in the config.")
              }
            )
        }
      }
    )
}

View File

@ -1,120 +0,0 @@
package aqua.remote
import aqua.builder.IPFSUploader
import DistOpts.*
import aqua.ipfs.IpfsOpts.{pathOpt, UploadFuncName}
import aqua.model.{LiteralModel, ValueModel}
import aqua.raw.value.{LiteralRaw, ValueRaw}
import aqua.run.{GeneralOptions, GeneralOpts, RunCommand, RunConfig, RunOpts, CliFunc}
import aqua.*
import cats.Applicative
import cats.data.{NonEmptyList, Validated}
import Validated.{invalidNel, validNel}
import aqua.io.PackagePath
import cats.effect.ExitCode
import cats.effect.kernel.Async
import cats.syntax.applicative.*
import cats.syntax.apply.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import com.monovore.decline.{Command, Opts}
import fs2.io.file.Path
import scala.concurrent.ExecutionContext
import scala.scalajs.js
// Subcommands that query module/blueprint/service info from a remote peer
// by running functions from the packaged network-info.aqua script.
object RemoteInfoOpts {

  val NetworkAqua = "aqua/network-info.aqua"

  // Function names inside network-info.aqua
  val ListModulesFuncName = "list_modules"
  val ListBlueprintsFuncName = "list_blueprints"
  val ListInterfacesByPeerFuncName = "list_interfaces_by_peer"
  val ListInterfacesFuncName = "list_services"
  val GetInterfaceFuncName = "get_interface"
  val GetModuleInterfaceFuncName = "get_module_interface"

  // --owner/-o: filter services by owner PeerId
  def ownerOpt: Opts[String] =
    Opts
      .option[String]("owner", "PeerId", "o")

  // --all: list every service regardless of owner
  def allFlag: Opts[Boolean] =
    Opts
      .flag("all", "Get all services on a node")
      .map(_ => true)
      .withDefault(false)

  // --id/-s: target service id
  def idOpt: Opts[String] =
    Opts
      .option[String]("id", "Service ID", "s")

  def listModules[F[_]: Async]: SubCommandBuilder[F] =
    SubCommandBuilder.simple(
      ListModulesFuncName,
      "List all modules on a peer",
      PackagePath(NetworkAqua),
      ListModulesFuncName
    )

  def listBlueprints[F[_]: Async]: SubCommandBuilder[F] =
    SubCommandBuilder.simple(
      ListBlueprintsFuncName,
      "List all blueprints on a peer",
      PackagePath(NetworkAqua),
      ListBlueprintsFuncName
    )

  // With --all runs list_services; otherwise lists by owner
  // (defaulting to the init peer when --owner is absent).
  def listInterfaces[F[_]: Async]: SubCommandBuilder[F] =
    SubCommandBuilder.valid(
      "list_interfaces",
      "List all service interfaces on a peer by a given owner",
      (GeneralOpts.opt, AppOpts.wrapWithOption(ownerOpt), allFlag).mapN {
        (common, peer, printAll) =>
          if (printAll)
            RunInfo(
              common,
              CliFunc(
                ListInterfacesFuncName,
                Nil
              ),
              Option(PackagePath(NetworkAqua))
            )
          else
            RunInfo(
              common,
              CliFunc(
                ListInterfacesByPeerFuncName,
                peer.map(LiteralRaw.quote).getOrElse(ValueRaw.InitPeerId) :: Nil
              ),
              Option(PackagePath(NetworkAqua))
            )
      }
    )

  def getInterface[F[_]: Async]: SubCommandBuilder[F] =
    SubCommandBuilder.valid(
      GetInterfaceFuncName,
      "Show interface of a service",
      (GeneralOpts.opt, idOpt).mapN { (common, serviceId) =>
        RunInfo(
          common,
          CliFunc(GetInterfaceFuncName, LiteralRaw.quote(serviceId) :: Nil),
          Option(PackagePath(NetworkAqua))
        )
      }
    )

  def getModuleInterface[F[_]: Async]: SubCommandBuilder[F] =
    SubCommandBuilder.valid(
      GetModuleInterfaceFuncName,
      "Print a module interface",
      (GeneralOpts.opt, idOpt).mapN { (common, serviceId) =>
        RunInfo(
          common,
          CliFunc(GetModuleInterfaceFuncName, LiteralRaw.quote(serviceId) :: Nil),
          Option(PackagePath(NetworkAqua))
        )
      }
    )
}

View File

@ -1,23 +0,0 @@
package aqua.remote
import aqua.{AquaIO, CommandBuilder}
import cats.data.{NonEmptyList, ValidatedNec}
import cats.effect.ExitCode
import cats.effect.kernel.Async
import com.monovore.decline.Command
import RemoteInfoOpts.*
import DistOpts.*
object RemoteOpts {

  /**
   * Top-level `remote` command: groups every subcommand that manages
   * or queries services on a remote peer.
   */
  def commands[F[_]: AquaIO: Async]: Command[F[ValidatedNec[String, Unit]]] = {
    // all remote subcommands, deploy first
    val subcommands = NonEmptyList.of(
      deploy,
      remove,
      createService,
      addBlueprint,
      listModules,
      listBlueprints,
      listInterfaces,
      getInterface
    )
    CommandBuilder(
      "remote",
      "Manage and query services on a remote peer",
      subcommands
    ).command
  }
}

View File

@ -1,92 +0,0 @@
package aqua.run
import aqua.AppOpts
import aqua.FluenceOpts.*
import aqua.builder.{ArgumentGetter, Service}
import aqua.config.ConfigOpts.{Krasnodar, Stage, TestNet}
import aqua.js.FluenceEnvironment
import aqua.raw.ConstantRaw
import aqua.raw.value.VarRaw
import aqua.logging.LogLevels
import cats.data.{NonEmptyList, Validated}
import cats.data.Validated.{invalidNel, validNel}
import cats.syntax.applicative.*
import cats.syntax.apply.*
import com.monovore.decline.Opts
import scribe.Level
import java.util.concurrent.TimeUnit
import scala.concurrent.duration.Duration
import scala.scalajs.js
import scala.util.Try
// Common CLI options shared by most commands (relay address, flags, timeouts).
object GeneralOpts {

  // --addr/-a: relay multiaddress. Accepts either a full /dns4|/ip4 multiaddr
  // containing "/p2p/12D3", or a shorthand "<env>-<index>" (e.g. "krasnodar-3")
  // resolved against the known environments.
  val multiaddrOpt: Opts[String] =
    Opts
      .option[String]("addr", "Relay multiaddress", "a")
      .mapValidated { s =>
        if ((s.startsWith("/dns4/") || s.startsWith("/ip4/")) && s.contains("/p2p/12D3")) {
          validNel(s)
        } else {
          // try the "<env>-<index>" shorthand; any parse failure falls to leftMap
          Validated.catchNonFatal {
            val splitted = s.split("-")
            val index = splitted(1).toInt
            splitted.head.toLowerCase match {
              case Krasnodar =>
                validNel(FluenceEnvironment.krasnodar(index).multiaddr)
              case TestNet =>
                validNel(FluenceEnvironment.testnet(index).multiaddr)
              case Stage =>
                validNel(FluenceEnvironment.stage(index).multiaddr)
              case _ =>
                invalidNel(
                  // TODO: maybe show an example of valid format in this error message and in the one below
                  "Invalid multiaddr format. Run 'aqua config default_peers' for valid multiaddress."
                )
            }
          }.andThen(identity)
            .leftMap(_ =>
              NonEmptyList.one(
                "Invalid multiaddr format. Run 'aqua config default_peers' for valid multiaddress."
              )
            )
        }
      }

  // Assembles the Flags options; noXorWrapper/noRelay only apply to `run`,
  // otherwise they are fixed to false.
  def flagsOpt(isRun: Boolean): Opts[Flags] =
    ((
      printAir,
      showConfigOpt,
      verboseOpt
    ) ++ {
      if (isRun)
        (AppOpts.noXorWrapper, AppOpts.noRelay)
      else
        (false.pure[Opts], false.pure[Opts])
    }).mapN(Flags.apply)

  /**
   * Builds the GeneralOptions option set.
   * @param isRun enable run-only flags
   * @param withSecret make the secret key mandatory (vs optional)
   * @param withConstants allow -C constant definitions
   * @param defaultTimeout default for --timeout
   */
  def commonOpt(
    isRun: Boolean,
    withSecret: Boolean,
    withConstants: Boolean,
    defaultTimeout: Duration = Duration(7000, TimeUnit.MILLISECONDS)
  ): Opts[GeneralOptions] =
    (
      timeoutOpt.withDefault(defaultTimeout),
      logLevelOpt,
      multiaddrOpt,
      onOpt,
      flagsOpt(isRun),
      if (withSecret) { secretKeyOpt.map(Some.apply) }
      else { AppOpts.wrapWithOption(secretKeyOpt) },
      if (withConstants) AppOpts.constantOpts else Nil.pure[Opts]
    ).mapN(GeneralOptions.apply)

  // Preconfigured variants used by the different commands
  val opt: Opts[GeneralOptions] = commonOpt(false, false, false)
  val runOpt: Opts[GeneralOptions] = commonOpt(true, false, true)
  val optWithSecretKey: Opts[GeneralOptions] = commonOpt(false, true, false)

  def optWithSecretKeyCustomTimeout(timeoutMs: Int): Opts[GeneralOptions] =
    commonOpt(false, true, false, Duration(timeoutMs, TimeUnit.MILLISECONDS))
}

View File

@ -1,149 +0,0 @@
package aqua.run
import aqua.LogLevelTransformer
import aqua.builder.{ArgumentGetter, Finisher, ResultPrinter, Service}
import aqua.definitions.FunctionDef
import aqua.io.OutputPrinter
import aqua.js.*
import aqua.keypair.KeyPairShow.show
import aqua.run.RunCommand.createKeyPair
import aqua.run.plugin.Plugin
import cats.data.Validated.{invalidNec, validNec}
import cats.data.ValidatedNec
import cats.effect.kernel.Async
import cats.effect.{Resource, Sync}
import cats.syntax.applicative.*
import cats.syntax.flatMap.*
import cats.syntax.show.*
import scala.concurrent.duration.Duration
import scala.concurrent.{ExecutionContext, Future, Promise, TimeoutException}
import scala.scalajs.js
import scala.scalajs.js.JSConverters.*
import scala.scalajs.js.{JSON, JavaScriptException, timers}
object FuncCaller {

  /**
   * Register services and call an air code with FluenceJS SDK.
   * Starts a peer, registers all services/getters/plugins, runs the call,
   * waits for the finisher (with a timeout), then stops the peer.
   * @param air code to call
   * @return validated unit; errors are rendered as messages, not thrown
   */
  def funcCall[F[_]: Async](
    name: String,
    air: String,
    functionDef: FunctionDef,
    config: RunConfig,
    resultPrinterService: ResultPrinter,
    finisherService: Finisher,
    services: List[Service],
    getters: List[ArgumentGetter],
    plugins: List[String]
  ): F[ValidatedNec[String, Unit]] = {
    FluenceUtils.setLogLevel(
      LogLevelTransformer.logLevelToFluenceJS(config.common.logLevel.fluencejs)
    )

    // stops peer in any way at the end of execution
    val resource = Resource.make(Fluence.getPeer().pure[F]) { peer =>
      Async[F].fromFuture(Sync[F].delay(peer.stop().toFuture))
    }

    resource.use { peer =>
      Async[F].executionContext.flatMap { implicit ec =>
        Async[F].fromFuture {
          (for {
            keyPair <- createKeyPair(config.common.secretKey)
            logLevel: js.UndefOr[aqua.js.LogLevel] = LogLevelTransformer.logLevelToAvm(
              config.common.logLevel.aquavm
            )
            pc = PeerConfig(
              config.common.multiaddr,
              config.common.timeout.toMillis.toInt : js.UndefOr[Int],
              keyPair,
              Debug(printParticleId = config.common.flags.verbose, marineLogLevel = logLevel)
            )
            peerConfig = Some(
              pc.createObj()
            ).orUndefined
            _ <- Fluence.start(peerConfig).toFuture
            // optionally dump the effective configuration as JSON
            _ =
              if (config.common.flags.showConfig) {
                val configJson = KeyPairOp.toDynamicJSON(keyPair)
                configJson.updateDynamic("relay")(config.common.multiaddr)
                configJson.updateDynamic("timeout")(config.common.timeout.toMillis)
                configJson.updateDynamic("log-level")(config.common.logLevel.compiler.name)
                OutputPrinter.print(JSON.stringify(configJson, null, 4))
              }
            // register all services
            _ = (services ++ getters :+ finisherService :+ resultPrinterService).map(_.register(peer))
            // register all plugins
            plugins <- Plugin.getPlugins(plugins)
            _ = plugins.map(_.register(peer))
            callFuture = CallJsFunction.funcCallJs(
              air,
              functionDef,
              List.empty
            )
            // error will be thrown on failed call
            _ <- callFuture
            finisherFuture = finisherService.promise.future
            // use a timeout in finisher if we have an async function and it hangs on node's side
            finisher = setTimeout(name, finisherFuture, config.common.timeout)
            _ <- finisher
            _ <- Fluence.stop().toFuture
          } yield validNec(()))
            .recover(handleFuncCallErrors(name, config.common.timeout))
            .pure[F]
        }
      }
    }
  }

  // Wraps `f` with a wall-clock timeout: fails with TimeoutException if `f`
  // does not complete within `timeout`; the timer is cleared on completion.
  private def setTimeout[T](funcName: String, f: Future[T], timeout: Duration)(implicit
    ec: ExecutionContext
  ): Future[T] = {
    val p = Promise[T]()
    val timeoutHandle =
      timers.setTimeout(timeout.toMillis)(
        p.tryFailure(new TimeoutException(timeoutErrorMessage(funcName, timeout, None)))
      )
    f.onComplete { result =>
      timers.clearTimeout(timeoutHandle)
      p.tryComplete(result)
    }
    p.future
  }

  // Human-readable timeout message; `pid` optionally carries the particle id.
  private def timeoutErrorMessage(funcName: String, timeout: Duration, pid: Option[String]) = {
    val pidStr = pid.map(s => " " + s).getOrElse("")
    s"Function '$funcName' timed out after ${timeout.toMillis} milliseconds. Increase the timeout with '--timeout' option or check if your code can hang while executing$pidStr."
  }

  // Converts any failure of the call future into a ValidatedNec error message:
  // local timeouts, SDK request timeouts (extracting the particle id from the
  // message) and raw JS exceptions are each rendered specially.
  private def handleFuncCallErrors(
    funcName: String,
    timeout: Duration
  ): PartialFunction[Throwable, ValidatedNec[String, Unit]] = { t =>
    val message =
      t match {
        case te: TimeoutException => te.getMessage
        case t if t.getMessage.contains("Request timed out after") =>
          val msg = t.getMessage
          timeoutErrorMessage(
            funcName,
            timeout,
            Some(msg.substring(msg.indexOf("particle id") - 1, msg.length))
          )
        case tjs: JavaScriptException =>
          // JS errors may not be Throwables; pull out `.message` if present
          val msg = tjs.exception.asInstanceOf[js.Dynamic].selectDynamic("message")
          if (scalajs.js.isUndefined(msg)) JSON.stringify(tjs.exception.asInstanceOf[js.Any])
          else msg.toString
        case _ => t.toString
      }
    invalidNec(message)
  }
}

View File

@ -1,96 +0,0 @@
package aqua.run
import aqua.ArgOpts.jsonFromFileOpts
import aqua.builder.{AquaFunction, ArgumentGetter, Service}
import aqua.definitions.{ArrowTypeDef, ProductTypeDef, TypeDefinition}
import aqua.js.{Conversions, ServiceHandler, TypeDefinitionJs}
import aqua.model.{AquaContext, ServiceModel}
import aqua.parser.expr.func.CallArrowExpr
import aqua.parser.lexer.{CallArrowToken, CollectionToken, LiteralToken, VarToken}
import aqua.parser.lift.Span
import aqua.raw.value.{CollectionRaw, LiteralRaw, ValueRaw, VarRaw}
import aqua.types.*
import cats.data.*
import cats.data.Validated.{invalid, invalidNec, invalidNel, valid, validNec, validNel}
import cats.effect.Concurrent
import cats.syntax.applicative.*
import cats.syntax.apply.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.semigroup.*
import cats.syntax.traverse.*
import cats.{Id, Semigroup, ~>}
import com.monovore.decline.Opts
import fs2.io.file.{Files, Path}
import scala.scalajs.js
// Description of a service with functions that return structures
// (parsed from a user-supplied json-service file).
case class JsonService(name: String, serviceId: String, functions: NonEmptyList[JsonFunction])

// One function of a JsonService: its name, the canned JS result it returns,
// and the Aqua type inferred for that result.
case class JsonFunction(name: String, result: js.Dynamic, resultType: Type)
object JsonService {

  /**
   * Matches json-service descriptions against services declared in the
   * compiled Aqua contexts and builds runnable Services from them.
   * Fails when a service/function is missing, has arguments, or when the
   * JSON result type does not match the declared codomain.
   */
  def findServices(
    contexts: Chain[AquaContext],
    services: List[JsonService]
  ): ValidatedNec[String, List[Service]] = {
    services
      .map(js =>
        // find the ServiceModel with the same name in any context
        contexts
          .collectFirstSome(_.services.get(js.name))
          .map(sm => (js, sm))
          .map(validNec)
          .getOrElse(
            Validated.invalidNec[String, ServiceModel](
              s"There is no service '${js.name}' (described in json-service file) in aqua source or it is not exported. Check the spelling or see https://fluence.dev/docs/aqua-book/language/header/#export"
            )
          )
      )
      .sequence
      .andThen { l =>
        l.map { case (jsonService: JsonService, sm: ServiceModel) =>
          val aquaFunctions: ValidatedNec[String, NonEmptyList[AquaFunction]] =
            jsonService.functions.map { jf =>
              sm.arrows(jf.name)
                .map { case arr: ArrowType =>
                  // json-service functions must be nullary
                  if (arr.domain.isEmpty)
                    TypeValidator
                      .validateTypes(jf.name, arr.codomain, Some(ProductType(jf.resultType :: Nil)))
                      .map { _ =>
                        new AquaFunction {
                          override def fnName: String = jf.name

                          // Always returns the canned result, converted from
                          // TS/JS representation to the declared Aqua type.
                          override def handler: ServiceHandler = _ => {
                            val converted = arr.codomain.toList match {
                              case h :: _ =>
                                Conversions.ts2aqua(jf.result, TypeDefinitionJs(TypeDefinition(h)))
                              case Nil =>
                                Conversions.ts2aqua(
                                  jf.result,
                                  TypeDefinitionJs(TypeDefinition(NilType))
                                )
                            }
                            js.Promise.resolve(converted)
                          }

                          override def arrow: ArrowTypeDef =
                            ArrowTypeDef(ProductTypeDef(NilType), ProductTypeDef(arr.codomain))
                        }
                      }
                  else
                    invalidNec(s"Json service '${jf.name}' cannot have any arguments")
                }
                .getOrElse(
                  Validated.invalidNec[String, AquaFunction](
                    s"There is no function '${jf.name}' in service '${jsonService.name}' in aqua source. Check your 'json-service' options"
                  )
                )
            }.sequence
          aquaFunctions.map(funcs => Service(jsonService.serviceId, funcs))
        }.sequence
      }
  }
}

View File

@ -1,87 +0,0 @@
package aqua.run
import aqua.ArgOpts.jsonFromFileOpts
import aqua.builder.ArgumentGetter
import aqua.js.JsonEncoder
import aqua.parser.expr.func.CallArrowExpr
import aqua.parser.lexer.{CallArrowToken, CollectionToken, LiteralToken, VarToken}
import aqua.parser.lift.Span
import aqua.raw.value.{CollectionRaw, LiteralRaw, ValueRaw, VarRaw}
import aqua.types.*
import cats.data.*
import cats.data.Validated.{invalid, invalidNec, invalidNel, valid, validNec, validNel}
import cats.effect.Concurrent
import cats.syntax.applicative.*
import cats.syntax.apply.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.semigroup.*
import cats.syntax.traverse.*
import cats.{Id, Semigroup, ~>}
import com.monovore.decline.Opts
import fs2.io.file.{Files, Path}
import scala.scalajs.js
object JsonServiceOpts {

  /**
   * --json-service/-j: reads and validates json-service file(s).
   * Each file must provide `name` (string), `serviceId` (string) and a
   * non-empty `functions` array where every entry has a string `name`
   * and a `result`; the result's Aqua type is inferred via JsonEncoder.
   */
  def jsonServiceOpt[F[_]: Files: Concurrent]
    : Opts[F[ValidatedNec[String, NonEmptyList[JsonService]]]] = {
    jsonFromFileOpts("json-service", "Path to file that describes service with JSON result", "j")
      .map(b =>
        b.map { case a: ValidatedNec[String, NonEmptyList[(Path, js.Dynamic)]] =>
          a.andThen { results =>
            results.map { case (path, res) =>
              // dynamic field access: each may be undefined or mistyped
              val name = res.name
              val serviceId = res.serviceId
              val functionsRaw = res.functions
              if (js.isUndefined(name) || js.typeOf(name) != "string")
                invalidNec(s"No name in JSON service '$path' or it is not a string")
              else if (js.isUndefined(serviceId) || js.typeOf(serviceId) != "string")
                invalidNec(s"No serviceId in JSON service '$path' or it is not a string")
              else if (js.isUndefined(functionsRaw) || !js.Array.isArray(functionsRaw))
                invalidNec(
                  s"'functions' field should exist and be an array in JSON service '$path'"
                )
              else {
                // validate each function entry and infer its result type
                val functionsV: ValidatedNec[String, List[JsonFunction]] = functionsRaw
                  .asInstanceOf[js.Array[js.Dynamic]]
                  .toList
                  .map { f =>
                    val fName = f.name
                    val fResult = f.result
                    if (js.isUndefined(fName) || js.typeOf(fName) != "string")
                      invalidNec(
                        s"One of the functions doesn't have a name or it is not a string in JSON service '$path'"
                      )
                    else if (js.isUndefined(fResult))
                      invalidNec(s"Function '$fName' don't have a result in '$path'")
                    else {
                      val funcName = fName.asInstanceOf[String]
                      JsonEncoder
                        .aquaTypeFromJson(funcName, fResult)
                        .map(t => JsonFunction(funcName, fResult, t))
                    }
                  }
                  .sequence
                functionsV.andThen { fs =>
                  NonEmptyList
                    .fromList(fs)
                    .map(fNEL =>
                      validNec(
                        JsonService(name.asInstanceOf[String], serviceId.asInstanceOf[String], fNEL)
                      )
                    )
                    .getOrElse(
                      invalidNec(s"List of functions in '$name' service is empty in $path")
                    )
                }
              }
            }.sequence
          }
        }
      )
  }
}

View File

@ -1,229 +0,0 @@
package aqua.run
import aqua.*
import aqua.ErrorRendering.showError
import aqua.backend.air.{AirBackend, FuncAirGen}
import aqua.backend.js.JavaScriptBackend
import aqua.backend.ts.TypeScriptBackend
import aqua.backend.Generated
import aqua.logging.LogFormatter
import aqua.definitions.{FunctionDef, TypeDefinition}
import aqua.builder.{ArgumentGetter, Finisher, ResultPrinter, Service}
import aqua.compiler.{AquaCompiled, AquaCompiler}
import aqua.files.{AquaFileSources, AquaFilesIO, FileModuleId}
import aqua.io.{AquaFileError, AquaPath, OutputPrinter, Prelude}
import aqua.js.*
import aqua.model.transform.{Transform, TransformConfig}
import aqua.model.{AquaContext, FuncArrow}
import aqua.parser.expr.func.CallArrowExpr
import aqua.parser.lexer.LiteralToken
import aqua.parser.lift.FileSpan
import aqua.raw.value.{ValueRaw, VarRaw}
import aqua.run.RunConfig
import aqua.run.RunOpts.transformConfig
import aqua.types.*
import cats.data.*
import cats.effect.*
import cats.effect.kernel.{Async, Clock}
import cats.effect.syntax.async.*
import cats.syntax.applicative.*
import cats.syntax.apply.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.list.*
import cats.syntax.monad.*
import cats.syntax.show.*
import cats.syntax.traverse.*
import cats.{Id, Monad, ~>}
import fs2.io.file.{Files, Path}
import scribe.Logging
import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.scalajs.js
import scala.scalajs.js.JSConverters.*
import scala.scalajs.js.JSON
import scala.scalajs.js.annotation.*
object RunCommand extends Logging {
/**
 * Builds a KeyPair from raw ed25519 secret-key bytes,
 * or generates a random keypair when no secret key is given.
 */
def createKeyPair(
sk: Option[Array[Byte]]
): Future[KeyPair] = {
sk.map { arr =>
// JS interop: widen bytes to Short before packing into a Uint8Array
val typedArr = js.typedarray.Uint8Array.from(arr.map(_.toShort).toJSArray)
KeyPair.fromEd25519SK(typedArr).toFuture
}.getOrElse(KeyPair.randomEd25519().toFuture)
}
// Wraps a JS argument value into a getter service that serves it,
// converted to the aqua representation of `argType`
private def createGetter(value: VarRaw, arg: js.Dynamic, argType: Type): ArgumentGetter = {
val converted = Conversions.ts2aqua(arg, TypeDefinitionJs(TypeDefinition(argType)))
ArgumentGetter(value.copy(baseType = argType), converted)
}
// Creates getter services for variables. Return an error if there is no variable in services
// and type of this variable couldn't be optional
private def getGettersForVars(
vars: List[(String, Type)],
argGetters: Map[String, VarJson]
): ValidatedNec[String, List[ArgumentGetter]] = {
vars.map { (n, argType) =>
val argGetterOp = argGetters.get(n)
(argGetterOp, argType) match {
case (None, _) => Validated.invalidNec(s"Unexcepted. There is no service for '$n' argument")
// BoxType could be undefined, so, pass service that will return 'undefined' for this argument
case (Some(s), _: BoxType) if s._2 == js.undefined =>
Validated.validNec(createGetter(s._1, s._2, argType) :: Nil)
// Undefined value for a non-optional argument is an error
case (Some(s), _) if s._2 == js.undefined =>
Validated.invalidNec(
s"Argument '$n' is missing. Expected argument '$n' of type '$argType'"
)
case (Some(s), _) =>
Validated.validNec(createGetter(s._1, s._2, argType) :: Nil)
}
}.reduceOption(_ combine _).getOrElse(Validated.validNec(Nil))
}
// Names for result variables: one per codomain element, "<name>0", "<name>1", ...
def resultVariableNames(funcCallable: FuncArrow, name: String): List[String] =
funcCallable.arrowType.codomain.toList.zipWithIndex.map { case (t, idx) =>
name + idx
}
/**
* Runs a function that is located in `input` file with FluenceJS SDK. Returns no output
* @param func
* function name
* @param input
* path to an aqua code with a function
* @param imports
* the sources the input needs
*/
def run[F[_]: Files: AquaIO: Async](
func: CliFunc,
input: Option[AquaPath],
imports: List[Path],
runConfig: RunConfig,
// services that will pass arguments to air
argumentGetters: Map[String, VarJson],
// builtin services for aqua run, for example: Console, FileSystem, etc
services: List[Service],
jsonServices: List[JsonService],
plugins: List[String],
transformConfig: TransformConfig
): F[ValidatedNec[String, Unit]] = {
val funcCompiler = new FuncCompiler[F](input, imports, transformConfig)
for {
prelude <- Prelude.init[F](true)
contextV <- funcCompiler.compile(prelude.importPaths, true)
// Time the whole call (validation + execution); errors short-circuit to `i.pure[F]` below
callResult <- Clock[F].timed {
contextV.andThen { context =>
// Locate the requested function and resolve declared JSON services in the compiled context
FuncCompiler
.findFunction(context, func)
.andThen(callable =>
JsonService
.findServices(context, jsonServices)
.map(jsonServices => (callable, jsonServices))
)
}.andThen { case (funcCallable, jsonServices) =>
// Service that prints the call results
val resultNames = resultVariableNames(funcCallable, runConfig.resultName)
val resultPrinterService =
ResultPrinter(
runConfig.resultPrinterServiceId,
runConfig.resultPrinterName,
resultNames
)
// Service that signals the call has finished
val promiseFinisherService =
Finisher(runConfig.finisherServiceId, runConfig.finisherFnName)
// Variable arguments as (name, declared type), deduplicated by name
val vars = func.args
.zip(funcCallable.arrowType.domain.toList)
.collect { case (VarRaw(n, _), argType) =>
(n, argType)
}
.distinctBy(_._1)
getGettersForVars(vars, argumentGetters).andThen { getters =>
val gettersTags = getters.map(s => s.callTag())
val preparer =
new CallPreparer(
func,
funcCallable,
gettersTags,
resultPrinterService.callTag,
promiseFinisherService.callTag(),
runConfig,
transformConfig
)
preparer.prepare().map { info =>
FuncCaller.funcCall[F](
info.name,
info.air,
info.definitions,
info.config,
resultPrinterService,
promiseFinisherService,
services ++ jsonServices,
getters,
plugins
)
}
}
} match {
// Validation succeeded: run the prepared effect; otherwise keep the accumulated errors
case Validated.Valid(f) =>
f
case i @ Validated.Invalid(_) => i.pure[F]
}
}
(callTime, result) = callResult
} yield {
logger.debug(s"Call time: ${callTime.toMillis}ms")
result
}
}
// Services always registered for `aqua run`
private val builtinServices =
aqua.builder
.Console() :: aqua.builder.IPFSUploader("ipfs") :: aqua.builder.DeployHelper() :: Nil
/**
* Executes a function with the specified settings
* @param common
* common settings
* @param funcName
* function name
* @param inputPath
* path to a file with a function
* @param imports
* imports that must be specified for correct compilation
* @param args
* arguments to pass into a function
* @param argumentGetters
* services to get argument if it is a variable
* @param services
* will be registered before calling for correct execution
* @return
*/
def execRun[F[_]: Async](
runInfo: RunInfo
): F[ValidatedNec[String, Unit]] = {
val common = runInfo.common
LogFormatter.initLogger(Some(common.logLevel.compiler))
implicit val aio: AquaIO[F] = new AquaFilesIO[F]
RunCommand
.run[F](
runInfo.func,
runInfo.input,
runInfo.imports,
RunConfig(
common
),
runInfo.argumentGetters,
runInfo.services ++ builtinServices,
runInfo.jsonServices,
runInfo.pluginsPaths,
transformConfig(common.on, common.constants, common.flags.noXor, common.flags.noRelay)
)
}
}

View File

@ -1,114 +0,0 @@
package aqua.run
import aqua.*
import aqua.builder.{ArgumentGetter, Service}
import aqua.io.{AquaPath, RelativePath}
import aqua.model.transform.TransformConfig
import aqua.model.{LiteralModel, ValueModel, VarModel}
import aqua.parser.expr.func.CallArrowExpr
import aqua.parser.lexer.{LiteralToken, VarToken}
import aqua.parser.lift.LiftParser.Implicits.idLiftParser
import aqua.parser.lift.Span
import aqua.logging.LogFormatter
import aqua.raw.ConstantRaw
import aqua.raw.value.{LiteralRaw, ValueRaw, VarRaw}
import aqua.run.plugin.Plugin
import aqua.types.BottomType
import cats.data.*
import cats.data.Validated.{invalid, invalidNec, valid, validNec, validNel}
import cats.effect.kernel.Async
import cats.effect.{Concurrent, ExitCode, IO}
import cats.syntax.applicative.*
import cats.syntax.apply.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.{Id, Monad, ~>}
import com.monovore.decline.{Command, Opts}
import fs2.io.file.{Files, Path}
import scribe.Logging
import java.util.Base64
import scala.concurrent.ExecutionContext
import scala.scalajs.js
import scala.scalajs.js.JSON
object RunOpts extends Logging {
// Name of the constant that carries the `--on` peer into compiled aqua
val OnPeerConst = "ON_PEER"
// Default transform config with `onPeer` constant
def transformConfig(
onPeer: Option[String],
constants: List[ConstantRaw],
noXor: Boolean,
noRelay: Boolean
): TransformConfig = {
val tc = TransformConfig(
constants =
onPeer.map(s => ConstantRaw(OnPeerConst, LiteralRaw.quote(s), false)).toList ++ constants,
wrapWithXor = !noXor
)
// Drop the relay variable when `noRelay` is requested
tc.copy(relayVarName = tc.relayVarName.filterNot(_ => noRelay))
}
// Gathers all `run` inputs: input path, imports, function-with-args, JSON services, plugin paths.
// Optional option groups default to valid "empty" values; errors from all groups are accumulated.
def runOptsCompose[F[_]: Files: Concurrent]
: Opts[F[ValidatedNec[String, (Option[AquaPath], List[Path], FuncWithData, Option[NonEmptyList[JsonService]], List[String])]]] = {
(
AppOpts.wrapWithOption(AppOpts.inputOpts[F]),
AppOpts.importOpts[F],
ArgOpts.funcWithArgsOpt[F],
AppOpts.wrapWithOption(JsonServiceOpts.jsonServiceOpt),
AppOpts.wrapWithOption(Plugin.opt)
).mapN { case (inputF, importF, funcWithArgsF, jsonServiceOp, pluginsOp) =>
for {
inputV: ValidatedNec[String, Option[AquaPath]] <-
inputF.map(_.map(_.map(p => Option(RelativePath(p))))).getOrElse {
validNec[String, Option[AquaPath]](None).pure[F]
}
importV <- importF
funcWithArgsV <- funcWithArgsF
jsonServiceV <- jsonServiceOp
.map(_.map(_.map(js => Some(js))))
.getOrElse(validNec[String, Option[NonEmptyList[JsonService]]](None).pure[F])
pluginsPathsV <- pluginsOp.getOrElse(validNec[String, List[String]](Nil).pure[F])
} yield {
// Combine the five validations, accumulating all errors
(inputV, importV, funcWithArgsV, jsonServiceV, pluginsPathsV).mapN { case (i, im, f, j, p) =>
(i, im, f, j, p)
}
}
}
}
// `aqua run` subcommand: collects options into a RunInfo for execution
def runOptions[F[_]: AquaIO: Async]: SubCommandBuilder[F] =
SubCommandBuilder.applyF(
name = "run",
header = "Run Aqua code",
(
GeneralOpts.runOpt,
runOptsCompose[F]
).mapN {
case (
common,
optionsF
) =>
LogFormatter.initLogger(Some(common.logLevel.compiler))
optionsF.map(
_.map { case (input, imps, funcWithArgs, services, pluginsPaths) =>
RunInfo(
common,
funcWithArgs.func,
input,
imps,
funcWithArgs.getters,
Nil,
services.map(_.toList).getOrElse(Nil),
pluginsPaths
)
}
)
}
)
// Complete `run` command, ready to be attached to the CLI
def runCommand[F[_]: Files: AquaIO: Async]: Command[F[ValidatedNec[String, Unit]]] =
runOptions.command
}

View File

@ -1,149 +0,0 @@
package aqua.run.plugin
import aqua.js.{CallJsFunction, FluencePeer, ServiceHandler}
import aqua.run.JsonService
import aqua.run.plugin.Plugin.toPromise
import aqua.types.TopType
import aqua.definitions.*
import cats.data.{NonEmptyList, ValidatedNec}
import cats.effect.Concurrent
import cats.syntax.applicative.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.traverse.*
import cats.data.Validated.{invalid, invalidNec, valid, validNec, validNel}
import com.monovore.decline.Opts
import fs2.io.file.{Files, Path}
import scalajs.js
import scala.concurrent.{ExecutionContext, Future}
import scala.scalajs.js.Promise
// A named JS closure exported by a plugin module; `closure` is invoked with the call's arguments
case class Function(name: String, closure: js.Function)
// A named plugin: a set of JS functions registered together as one service on a peer
case class Plugin(name: String, functions: List[Function]) {
// Registers every function of this plugin as a handler of a service called `name`.
// Argument/return types are unknown at this point, so everything is typed as TopType.
def register(peer: FluencePeer): Unit = {
val (handlers, funcTypes) = functions.map { f =>
// get arguments types as TopType
val argCount = f.closure.length
val fields = Range(0, argCount).toList.map { i => ("arg" + i, TopTypeDef) }
val arrowType =
ArrowTypeDef(LabeledProductTypeDef(fields), UnlabeledProductTypeDef(TopTypeDef :: Nil))
val fType = (f.name, arrowType)
// handlers for registering
val h: ServiceHandler = args => {
val argsList = Range(0, argCount).toList.map { i =>
args(i)
}
// Invoke the JS closure with the peer-supplied args; normalize the result to a Promise
val res = f.closure.call(this.asInstanceOf[js.Any], argsList: _*)
toPromise(res)
}
((f.name, h), fType)
}.unzip
CallJsFunction.registerService(
peer,
name,
handlers,
ServiceDef(Some(name), LabeledProductTypeDef(funcTypes), "")
)
}
}
object Plugin {
// File extension without the dot ("" if the name has no dot)
private def fileExt(p: Path): String =
p.fileName.toString.split('.').toList.lastOption.getOrElse("")
/**
 * Resolves `str` to a list of absolute '.mjs' file paths.
 * A file must have the '.mjs' extension; a directory is scanned for '.mjs' files,
 * including direct children of its subdirectories (skipping 'node_modules').
 */
def pathToMjsFilesList[F[_]: Files: Concurrent](str: String): F[ValidatedNec[String, List[String]]] = {
val path = Path(str).absolute
Files[F]
.exists(path)
.flatMap { exists =>
if (exists)
Files[F].isRegularFile(path).flatMap { isFile =>
if (isFile) {
if (fileExt(path) == "mjs") {
validNec(path.toString :: Nil).pure[F]
} else {
invalidNec(s"If path '$str' is a file, it must be with '.mjs' extension")
.pure[F]
}
} else {
Files[F]
.list(path)
.evalMap { ps =>
val psAbs = ps.absolute
for {
isFile <- Files[F].isRegularFile(ps)
files <-
if (isFile) {
if (fileExt(ps) == "mjs") (psAbs :: Nil).pure[F]
else Nil.pure[F]
} else if (ps.fileName.toString != "node_modules") {
// One extra level of nesting: .mjs files directly inside the subdirectory
Files[F].list(psAbs).filter(pp => fileExt(pp) == "mjs").compile.toList
} else {
Nil.pure[F]
}
} yield {
files
}
}
.compile
.toList
.map(_.flatten.map(_.absolute.toString))
.map(validNec)
}
}
else {
invalidNec(s"There is no path '$str'").pure[F]
}
}
}
// '--plugin' CLI option; repeatable, each value resolved to its .mjs file list
def opt[F[_]: Files: Concurrent]: Opts[F[ValidatedNec[String, List[String]]]] = {
Opts
.options[String]("plugin", "[experimental] Path to a directory with JS plugins", "", "path")
.map { strs =>
strs.toList.map(s => pathToMjsFilesList(s)).sequence.map(_.sequence.map(_.flatten))
}
}
// Imports every module path and concatenates all plugins they export
def getPlugins(paths: List[String])(implicit
ec: ExecutionContext
): Future[List[Plugin]] =
paths.map(p => getPlugin(p)).sequence.map(_.flatten)
// Treats a thenable as a Promise; wraps any other value in an already-resolved Promise
private def toPromise(arg: js.Dynamic): js.Promise[js.Dynamic] = {
if (js.typeOf(arg) == "object" && js.typeOf(arg.`then`) == "function")
arg.asInstanceOf[js.Promise[js.Dynamic]]
else js.Promise.resolve(arg)
}
/**
 * Dynamically imports a JS module and reads its `plugins()` export:
 * a dictionary of pluginName -> (functionName -> closure).
 * Modules without a `plugins` function export yield an empty list.
 */
def getPlugin(path: String)(implicit
ec: ExecutionContext
): Future[List[Plugin]] = {
for {
file <- js.`import`[js.Dynamic](path).toFuture
plugin <- {
if (js.typeOf(file.plugins) == "function") {
val res = file.applyDynamic("plugins")()
toPromise(res).toFuture.map(_.asInstanceOf[js.Dictionary[js.Dictionary[js.Any]]])
} else {
// No `plugins` export: nothing to load from this module
Future(js.Dictionary[js.Dictionary[js.Any]]())
}
}
} yield {
plugin.map { case (k, v) =>
val functions = v.map { case (kf, vf) =>
Function(kf, vf.asInstanceOf[js.Function])
}.toList
Plugin(k, functions)
}.toList
}
}
}

View File

@ -1,249 +0,0 @@
package aqua.script
import aqua.*
import aqua.ArgOpts.{dataFileOrStringOpt, funcOpt, funcWithArgsOpt}
import aqua.backend.Generated
import aqua.backend.air.{AirBackend, AirGen, FuncAirGen}
import aqua.builder.ArgumentGetter
import aqua.compiler.AquaCompiler
import aqua.js.VarJson
import aqua.io.{PackagePath, Prelude, RelativePath}
import aqua.ipfs.js.IpfsApi
import aqua.keypair.KeyPairShow.show
import aqua.model.transform.{Transform, TransformConfig}
import aqua.model.{AquaContext, FuncArrow, LiteralModel}
import aqua.parser.lift.FileSpan
import aqua.raw.ops.{Call, CallArrowRawTag}
import aqua.raw.value.{LiteralRaw, ValueRaw, VarRaw}
import aqua.res.{AquaRes, FuncRes}
import aqua.run.RunOpts.logger
import aqua.run.{
CliFunc,
FuncCompiler,
GeneralOptions,
GeneralOpts,
RunCommand,
RunConfig,
RunOpts
}
import aqua.types.{ArrowType, LiteralType, NilType, ScalarType}
import cats.data.*
import cats.data.Validated.{invalid, invalidNec, valid, validNec, validNel}
import cats.effect.kernel.{Async, Clock}
import cats.effect.{Concurrent, ExitCode, Resource, Sync}
import cats.syntax.applicative.*
import cats.syntax.apply.*
import cats.syntax.flatMap.*
import cats.syntax.functor.*
import cats.syntax.show.*
import cats.syntax.traverse.*
import cats.{Applicative, Monad}
import com.monovore.decline.{Command, Opts}
import fs2.io.file.{Files, Path}
import scribe.Logging
import scala.concurrent.ExecutionContext
import scala.scalajs.js
object ScriptOpts extends Logging {
// Embedded aqua file that implements the schedule/remove/list functions
val ScriptAqua = "aqua/script.aqua"
val AddFuncName = "schedule"
val RemoveFuncName = "remove"
val ListFuncName = "list"
// A function call whose arguments are all literals (resolved from --data if needed)
case class FuncWithLiteralArgs(func: CliFunc, args: List[LiteralRaw])
// Func with only literal arguments (strings, booleans or numbers)
def funcWithLiteralsOpt[F[_]: Files: Concurrent]
: Opts[F[ValidatedNec[String, FuncWithLiteralArgs]]] = {
(dataFileOrStringOpt[F], funcOpt).mapN { case (dataF, func) =>
dataF.map { dataV =>
dataV.andThen { data =>
resolveOnlyLiteralsFromData(func.args, data).map { literals =>
FuncWithLiteralArgs(func, literals)
}
}
}
}
}
// Resolves variable arguments against the --data JSON; only scalar JS values
// (number, string, boolean) are accepted, everything else is a validation error.
private def resolveOnlyLiteralsFromData(
args: List[ValueRaw],
data: Option[js.Dynamic]
): ValidatedNec[String, List[LiteralRaw]] = {
val literals = args.map {
case l: LiteralRaw => validNec(l) // TODO handle CollectionRaw?
case v @ VarRaw(name, _) =>
data.map { d =>
val arg = d.selectDynamic(name)
js.typeOf(arg) match {
case "number" => validNec(LiteralRaw(arg.toString, LiteralType.number))
// NOTE(review): the string case passes `arg.toString` unquoted, while other
// call sites in this file use LiteralRaw.quote for strings — confirm intended
case "string" => validNec(LiteralRaw(arg.toString, LiteralType.string))
case "boolean" => validNec(LiteralRaw(arg.toString, LiteralType.bool))
case t =>
invalidNec(
s"Scheduled script functions support 'string', 'boolean' and 'number' argument types only"
)
}
}.getOrElse(invalidNec(s"There is no '$name' argument in data"))
case _ =>
invalidNec(
s"Scheduled script functions support 'string', 'boolean' and 'number' argument types only"
)
}
literals.traverse(identity)
}
// `aqua script` command with add/list/remove subcommands
def scriptOpt[F[_]: Async: AquaIO]: Command[F[ValidatedNec[String, Unit]]] =
CommandBuilder(
name = "script",
header = "Manage scheduled scripts",
NonEmptyList(add, list :: remove :: Nil)
).command
def intervalOpt: Opts[Option[Int]] =
AppOpts.wrapWithOption(
Opts
.option[Int]("interval", "Indicating how often the script will run in seconds", "n")
)
def scriptIdOpt: Opts[String] =
Opts
.option[String]("script-id", "Script id to remove", "c")
// Renders a compiled function into its AIR representation
def generateAir(callable: FuncArrow, transformConfig: TransformConfig): String = {
val funcRes = Transform.funcRes(callable, transformConfig).value
AirGen(funcRes.body).generate.show
}
private def commonScriptOpts = GeneralOpts.commonOpt(false, true, true)
// Compiles `input`, wraps the target function into a no-arg "<name>_scheduled"
// entry point and returns its AIR as a string
private def compileAir[F[_]: Async: AquaIO](
input: Path,
imports: List[Path],
funcWithArgs: FuncWithLiteralArgs
): F[ValidatedNec[String, String]] = {
val tConfig = TransformConfig(relayVarName = None, wrapWithXor = false)
val funcCompiler =
new FuncCompiler[F](
Option(RelativePath(input)),
imports,
tConfig
)
val funcName = funcWithArgs.func.name
for {
prelude <- Prelude.init[F](true)
contextV <- funcCompiler.compile(prelude.importPaths)
wrappedBody = CallArrowRawTag.func(funcName, Call(funcWithArgs.func.args, Nil)).leaf
result = contextV
.andThen(context => FuncCompiler.findFunction(context, funcWithArgs.func))
.map { callable =>
generateAir(
FuncArrow(
funcName + "_scheduled",
wrappedBody,
ArrowType(NilType, NilType),
Nil,
Map(funcName -> callable),
Map.empty,
None
),
tConfig
)
}
} yield result
}
// `script add`: compiles the function to AIR and schedules it on the node
def add[F[_]: Async: AquaIO]: SubCommandBuilder[F] =
SubCommandBuilder.applyF(
name = "add",
header = "Upload aqua function as a scheduled script.",
(
commonScriptOpts,
scheduleOptsCompose[F],
intervalOpt
).mapN { (common, optionsF, intervalOp) =>
val res: F[ValidatedNec[String, RunInfo]] = optionsF
.flatMap(
_.map { case (input, imports, funcWithArgs) =>
// Missing --interval means "run once": pass Nil as the interval argument
val intervalArg =
intervalOp
.map(i => LiteralRaw(i.toString, LiteralType.number))
.getOrElse(ValueRaw.Nil)
val someRes: F[ValidatedNec[String, RunInfo]] = for {
scriptV <- compileAir(input, imports, funcWithArgs)
result: ValidatedNec[String, RunInfo] = scriptV.map { script =>
val scriptVar = VarRaw("script", ScalarType.string)
RunInfo(
common,
CliFunc(AddFuncName, scriptVar :: intervalArg :: Nil),
Option(PackagePath(ScriptAqua)),
Nil,
Map(
"script" -> VarJson(
scriptVar,
// hack, cannot create unnamed Dynamic
// TODO: fix it
scalajs.js.Dynamic.literal("script" -> script).selectDynamic("script")
)
)
)
}
} yield {
result
}
someRes
}.fold(
errs => Validated.Invalid[NonEmptyChain[String]](errs).pure[F],
identity
)
)
res
}
)
// Gathers the `script add` inputs: input path, imports, function with literal args
def scheduleOptsCompose[F[_]: Files: Async]
: Opts[F[ValidatedNec[String, (Path, List[Path], FuncWithLiteralArgs)]]] = {
(AppOpts.inputOpts[F], AppOpts.importOpts[F], funcWithLiteralsOpt[F]).mapN {
case (inputF, importF, funcWithLiteralsF) =>
for {
inputV <- inputF
importV <- importF
funcWithLiteralsV <- funcWithLiteralsF
} yield {
(inputV, importV, funcWithLiteralsV).mapN { case (i, im, f) =>
(i, im, f)
}
}
}
}
// Removes scheduled script from a node
def remove[F[_]: Async]: SubCommandBuilder[F] =
SubCommandBuilder.valid[F](
"remove",
"Remove a script from a remote peer",
(
commonScriptOpts,
scriptIdOpt
).mapN { (common, scriptId) =>
RunInfo(
common,
CliFunc(RemoveFuncName, LiteralRaw.quote(scriptId) :: Nil),
Option(PackagePath(ScriptAqua))
)
}
)
// Print all scheduled scripts
def list[F[_]: Async]: SubCommandBuilder[F] =
SubCommandBuilder
.simple[F]("list", "Print all scheduled scripts", PackagePath(ScriptAqua), ListFuncName)
}

View File

@ -1,150 +0,0 @@
package aqua
import aqua.js.JsonEncoder
import aqua.types.{ArrayType, LiteralType, OptionType, StructType}
import cats.Id
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import cats.data.{NonEmptyList, NonEmptyMap}
// Tests for JsonEncoder.aquaTypeFromJson: inferring aqua types from parsed JSON values
class JsonEncoderSpec extends AnyFlatSpec with Matchers {
// Fields missing from some objects of an array are merged as optional fields
"json encoder" should "get type from json" in {
val json = scalajs.js.JSON.parse("""{
|"arr2": [{
| "a": "fef",
| "b": [1,2,3,4],
| "c": "erfer"
| },{
| "a": "ferfer",
| "b": [1,2,3,4],
| "c": "erfer"
| }, {
| "a": "as",
| "d": "gerrt"
| }]
|} """.stripMargin)
val res = JsonEncoder.aquaTypeFromJson("n", json)
res.isValid shouldBe true
val elType = StructType(
"",
NonEmptyMap.of(
("a", LiteralType.string),
("b", ArrayType(LiteralType.number)),
("c", OptionType(LiteralType.string)),
("d", OptionType(LiteralType.string))
)
)
res.toOption.get shouldBe StructType("", NonEmptyMap.of(("arr2", ArrayType(elType))))
}
// Conflicting types for the same field (array vs string) must be rejected
"json encoder" should "get type from json 1" in {
val json = scalajs.js.JSON.parse("""{
|"arr2": [{
| "b": [1,2,3,4]
| },{
| "b": [1,2,3,4]
| }, {
| "b": "gerrt"
| }]
|} """.stripMargin)
val res = JsonEncoder.aquaTypeFromJson("n", json)
res.isValid shouldBe false
}
// Mixed nested structures, scalar arrays and scalars infer to the expected struct
"json encoder" should "get type from json 2" in {
val json =
scalajs.js.JSON.parse(
"""{
|"arr1": [{"a": [{"c": "", "d": 123}, {"c": ""}], "b": ""}, {"b": ""}],
|"arr2": [1,2,3,4],
|"arr3": ["fre", "grt", "rtgrt"],
|"str": "egrerg",
|"num": 123
|} """.stripMargin
)
val res = JsonEncoder.aquaTypeFromJson("n", json)
res.isValid shouldBe true
val innerElType = StructType(
"",
NonEmptyMap.of(
("c", LiteralType.string),
("d", OptionType(LiteralType.number))
)
)
val elType = StructType(
"",
NonEmptyMap.of(
("a", ArrayType(innerElType)),
("b", LiteralType.string)
)
)
val t = StructType(
"",
NonEmptyMap.of(
("arr1", ArrayType(elType)),
("arr2", ArrayType(LiteralType.number)),
("arr3", ArrayType(LiteralType.string)),
("str", LiteralType.string),
("num", LiteralType.number)
)
)
res.toOption.get shouldBe t
}
// Same conflict as "json 1": array vs string for one field is invalid
"json encoder" should "get type from json 3" in {
val json = scalajs.js.JSON.parse("""{
|"arr2": [{
| "b": [1,2,3,4]
| },{
| "b": [1,2,3,4]
| }, {
| "b": "gerrt"
| }]
|} """.stripMargin)
val res = JsonEncoder.aquaTypeFromJson("n", json)
res.isValid shouldBe false
}
// An object field present in only some array elements becomes an optional struct
"json encoder" should "get type from json 4" in {
val json =
scalajs.js.JSON.parse(
"""{
|"arr4": [{"a": "", "b": {"c": "", "d": [1,2,3,4]}}, {"a": ""}]
|} """.stripMargin
)
val res = JsonEncoder.aquaTypeFromJson("n", json)
res.isValid shouldBe true
val arr4InnerType = OptionType(
StructType(
"",
NonEmptyMap.of(
("c", LiteralType.string),
("d", ArrayType(LiteralType.number))
)
)
)
val arr4ElType = StructType(
"",
NonEmptyMap.of(
("a", LiteralType.string),
("b", arr4InnerType)
)
)
val t = StructType(
"",
NonEmptyMap.of(
("arr4", ArrayType(arr4ElType))
)
)
res.toOption.get shouldBe t
}
}

View File

@ -1,207 +0,0 @@
package aqua
import aqua.run.TypeValidator
import aqua.types.{ArrayType, LiteralType, OptionType, ScalarType, StructType, Type}
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import cats.data.{NonEmptyList, NonEmptyMap, ValidatedNec}
// Tests for TypeValidator.validateTypes: checking aqua-declared types
// against types inferred from JSON data
class TypeValidatorSpec extends AnyFlatSpec with Matchers {
// Reference aqua type used by several tests below
val aquaType = StructType(
"some",
NonEmptyMap.of(
("field1", OptionType(ArrayType(ScalarType.u8))),
("field2", OptionType(ArrayType(OptionType(ScalarType.i32)))),
("field3", ArrayType(ArrayType(ArrayType(ScalarType.i64)))),
(
"field4",
OptionType(
StructType(
"some2",
NonEmptyMap.of(
("innerfield1", OptionType(ScalarType.u32)),
("innerfield2", ArrayType(ScalarType.i16))
)
)
)
)
)
)
// Helper: validate `jsonType` (right) against the declared `aquaType` (left)
private def validate(aquaType: Type, jsonType: Type) = {
TypeValidator.validateTypes("some", aquaType, Some(jsonType))
}
"type validator" should "return invalid result if check same type" in {
val res = validate(aquaType, aquaType)
res.isValid shouldBe false
}
"type validator" should "return invalid result if there is no field" in {
val res = validate(
StructType(
"some",
NonEmptyMap.of(
("field1", ScalarType.u8),
("field2", ArrayType(ScalarType.string))
)
),
StructType(
"some",
NonEmptyMap.of(
("field1", ScalarType.u8)
)
))
res.isValid shouldBe false
}
// Optionals accept a value of the wrapped type; extra nesting is rejected
"type validator" should "validate optional types properly" in {
val aquaOptionalArrArrType = OptionType(ArrayType(ArrayType(ScalarType.u8)))
val aquaOptionalArrType = OptionType(ArrayType(ScalarType.u8))
val aquaOptionalType = OptionType(ScalarType.u8)
val res1 = validate(aquaOptionalType, LiteralType.number)
res1.isValid shouldBe true
val res2 = validate(aquaOptionalArrType, ArrayType(LiteralType.number))
res2.isValid shouldBe true
val res3 = validate(aquaOptionalArrArrType, ArrayType(ArrayType(LiteralType.number)))
res3.isValid shouldBe true
val res1Invalid = validate(aquaOptionalType, ArrayType(LiteralType.number))
res1Invalid.isValid shouldBe false
}
// Array nesting depth must match exactly
"type validator" should "validate array types properly" in {
val aquaArrArrArrType = ArrayType(ArrayType(ArrayType(ScalarType.u8)))
val aquaArrArrType = ArrayType(ArrayType(ScalarType.u8))
val aquaArrType = ArrayType(ScalarType.u8)
val res1 = validate(aquaArrType, ArrayType(LiteralType.number))
res1.isValid shouldBe true
val res2 = validate(aquaArrArrType, ArrayType(ArrayType(LiteralType.number)))
res2.isValid shouldBe true
val res3 = validate(aquaArrArrArrType, ArrayType(ArrayType(ArrayType(LiteralType.number))))
res3.isValid shouldBe true
val res1invalid = validate(aquaArrType, LiteralType.number)
res1invalid.isInvalid shouldBe true
val res2invalid = validate(aquaArrArrType, ArrayType(LiteralType.number))
res2invalid.isInvalid shouldBe true
}
// Options interleaved with arrays collapse to the array structure only
"type validator" should "validate options with arrays types properly" in {
val aquaOptArrOptArrType = OptionType(ArrayType(OptionType(ArrayType(ScalarType.u8))))
val res1 = validate(aquaOptArrOptArrType, ArrayType(ArrayType(LiteralType.number)))
res1.isValid shouldBe true
val res1invalid =
validate(aquaOptArrOptArrType, ArrayType(ArrayType(ArrayType(LiteralType.number))))
res1invalid.isValid shouldBe false
val res2invalid =
validate(aquaOptArrOptArrType, ArrayType(ArrayType(ArrayType(ArrayType(LiteralType.number)))))
res2invalid.isValid shouldBe false
}
"type validator" should "validate complex types properly" in {
val res1 = validate(
aquaType,
StructType(
"some",
NonEmptyMap.of(
("field1", ArrayType(LiteralType.number)),
("field2", ArrayType(LiteralType.number)),
("field3", ArrayType(ArrayType(ArrayType(LiteralType.number)))),
(
"field4",
StructType(
"some2",
NonEmptyMap.of(
("innerfield1", LiteralType.number),
("innerfield2", ArrayType(LiteralType.number))
)
)
)
)
)
)
res1.isValid shouldBe true
}
// Non-optional declared fields must be present in the JSON-derived struct
"type validator" should "return invalid if there is no field" in {
val structType = StructType(
"some",
NonEmptyMap.of(
("field1", ScalarType.u8),
("field2", ScalarType.string),
("field3", OptionType(ScalarType.string))
)
)
val res1invalid = validate(
structType,
StructType(
"some",
NonEmptyMap.of(
("field2", LiteralType.string)
)
)
)
res1invalid.isValid shouldBe false
val res2invalid = validate(
structType,
StructType(
"some",
NonEmptyMap.of(
("field1", ScalarType.u8)
)
)
)
res2invalid.isValid shouldBe false
val res1 = validate(
structType,
StructType(
"some",
NonEmptyMap.of(
("field1", LiteralType.number),
("field2", LiteralType.string)
)
)
)
res1.isValid shouldBe true
validate(
structType,
StructType(
"some",
NonEmptyMap.of(
("field1", ScalarType.u8),
("field2", ScalarType.string),
("field3", ScalarType.string)
)
)
).isValid shouldBe true
}
"type validator" should "return invalid if there is one array when it must be two" in {
val leftType = StructType(
"some",
NonEmptyMap.of(
("arrr", OptionType(ArrayType(ArrayType(ScalarType.u8))))
)
)
val rightType = StructType(
"some",
NonEmptyMap.of(
("arrr", ArrayType(LiteralType.number))
)
)
validate(leftType, rightType).isInvalid shouldBe true
}
}

View File

@ -1,15 +0,0 @@
package aqua
import cats.data.ValidatedNec
import cats.effect.ExitCode
import cats.effect.kernel.Async
import cats.effect.std.Console
import com.monovore.decline.Opts
import fs2.io.file.{Files, Path}
import scala.concurrent.ExecutionContext
// Scala-specific options and subcommands
object PlatformOpts {

  /** No platform-specific subcommands are contributed here: `Opts.never` never matches. */
  def opts[F[_]: Files: AquaIO: Async: Console]: Opts[F[ValidatedNec[String, Unit]]] =
    Opts.never
}

View File

@ -1,43 +0,0 @@
package aqua
import aqua.backend.ts.TypeScriptBackend
import aqua.files.AquaFilesIO
import aqua.logging.LogFormatter
import aqua.model.transform.TransformConfig
import cats.data.Validated
import cats.effect.{IO, IOApp, Sync}
import fs2.io.file.Path
import scribe.Level
object Test extends IOApp.Simple {

  implicit val aio: AquaIO[IO] = new AquaFilesIO[IO]

  /** Dev entry point: compiles a sample .aqua file to TypeScript and reports timing. */
  override def run: IO[Unit] = {
    // Route all logging through a single console handler at Info level.
    scribe.Logger.root
      .clearHandlers()
      .clearModifiers()
      .withHandler(formatter = LogFormatter.formatterWithFilename, minimumLevel = Some(Level.Info))
      .replace()
    for {
      startedAt <- IO(System.currentTimeMillis())
      compilation = AquaPathCompiler
        .compileFilesTo[IO](
          Path("./aqua-src/antithesis.aqua"),
          List(Path("./aqua")),
          Option(Path("./target")),
          TypeScriptBackend(false, "IFluenceClient$$"),
          TransformConfig(wrapWithXor = false),
          false
        )
      _ <- compilation.map { outcome =>
        outcome match {
          // Errors go to stderr, successful results to stdout
          case Validated.Invalid(errs) =>
            errs.map(System.err.println): Unit
          case Validated.Valid(res) =>
            res.map(println): Unit
        }
      }
      _ <- IO.println("Compilation ends in: " + (System.currentTimeMillis() - startedAt) + " ms")
    } yield ()
  }
}

Some files were not shown because too many files have changed in this diff Show More