<reponame>mlulaj/fuzzing
# Example for Innovus: https://legacy.iis.ee.ethz.ch/~vlsi2/ex05/ex05.pdf
---
file: pads.io
left: "1"   # Bottom to top
top: "2"    # Left to right
right: "3"  # Bottom to top
bottom: "4" # Left to right
# Note: In your scripts, you should specify instance array styles
# i.e. hdl_instance_array_naming_style string (For Genus)
instanceArray: "{{signal}}[{{idx}}]"
padLine: |
  | (inst name = "{{padInst}}") # Side: {{side}}, Order: {{padIdx}}
template: |
  |(globals
  | version = 3
  | io_order = default
  |)
  |(iopad
  | (bottomleft
  | (inst name="corner_ll" cell="CORNER_EXAMPLE" )
  | )
  | (bottomright
  | (inst name="corner_lr" orientation=MY cell="CORNER_EXAMPLE" )
  | )
  | (topleft
  | (inst name="corner_ul" orientation=MX cell="CORNER_EXAMPLE" )
  | )
  | (topright
  | (inst name="corner_ur" cell="CORNER_EXAMPLE" )
  | )
  | (left
  |{{leftPads}}
  | )
  | (right
  |{{rightPads}}
  | )
  | (top
  |{{topPads}}
  | )
  | (bottom
  |{{bottomPads}}
  | )
  |)
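# A minimal sketch of the Genus setting referenced in the note above
# (assumption: modern Genus "set_db" syntax; the "%s[%d]" format string is
# only an illustration chosen to mirror the instanceArray template
# "{{signal}}[{{idx}}]" — check your tool version's documentation):
#   set_db hdl_instance_array_naming_style "%s\[%d\]"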
<reponame>baptistepetit/cordic
name: Main Branch Event

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

defaults:
  run:
    shell: bash

jobs:
  poc-build:
    defaults:
      run:
        working-directory: poc
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Use Node.js 12.x
        uses: actions/setup-node@v1
        with:
          node-version: 12.x
      - name: Install dependencies
        run: npm ci
      - name: Test building the application
        run: npm run build
      - name: Run the unit tests
        run: npm test
      - name: Save website artifacts
        uses: actions/upload-artifact@v2
        with:
          name: demo-website
          path: ${{ github.workspace }}/poc/dist
          if-no-files-found: error

  gh-pages-deploy:
    needs: poc-build
    runs-on: ubuntu-latest
    if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
    steps:
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
      - run: |
          git config user.name github-actions
          git config user.email <EMAIL>
      - name: Setup independent git worktree to receive artifacts
        run: ./scripts/deploy_gh_pages_setup.sh "poc/dist"
      - name: Download website artifacts
        uses: actions/download-artifact@v2
        with:
          name: demo-website
          path: ${{ github.workspace }}/poc/dist
      - name: Deploy gh-pages website
        run: ./scripts/deploy_gh_pages_push_and_clean.sh "poc/dist"

  fp-cppcheck:
    defaults:
      run:
        working-directory: fixed_point
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
      - name: Install cppcheck
        run: sudo apt-get install cppcheck -y
      - name: Run an analysis of the code
        run: make check

  fp-build:
    defaults:
      run:
        working-directory: fixed_point
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v2
      - name: Install googletest, google-mock
        run: >
          sudo apt-get install -y
          googletest libgtest-dev
          google-mock libgmock-dev
      - name: Compile the code
        run: make -j
      - name: Run the Unit Tests
        run: make test

  hw-build:
    defaults:
      run:
        working-directory: hardware_design
    runs-on: ubuntu-latest
    container: ghdl/vunit:gcc
    steps:
      - uses: actions/checkout@v2
      - name: Run the Unit Tests
        run: python3 run.py
axi:
  files:
    - src/axi_pkg.sv
    - src/axi_test.sv
    - src/axi_intf.sv
    - src/axi_arbiter.sv
    - src/axi_address_resolver.sv
    - src/axi_to_axi_lite.sv
    - src/axi_lite_to_axi.sv
    - src/axi_lite_xbar.sv
    - src/axi_lite_cut.sv
    - src/axi_lite_multicut.sv
    - src/axi_lite_join.sv
    - src/axi_cut.sv
    - src/axi_multicut.sv
    - src/axi_join.sv
    - src/axi_modify_address.sv
    - src/axi_delayer.sv
    - src/axi_id_remap.sv
<reponame>c-rus/legoHDL
name: build

on: [push]

jobs:
  setup:
    strategy:
      fail-fast: false
      matrix:
        platform: [ubuntu-latest, macos-latest, windows-latest]
        python-version: ['3.5', '3.x']
    runs-on: ${{ matrix.platform }}
    steps:
      - uses: actions/checkout@v2
      #setup python
      - uses: actions/[email protected]
        with:
          python-version: ${{ matrix.python-version }}
      - run: python -c "import sys; print(sys.version)"
      #install and configure legoHDL
      - run: pip install .
      #configure legohdl for use
      - run: printf 'y\n\nchase\ngedit\n~/demo/\ny\n' | legohdl
      #set some settings via command-line
      - run: legohdl config -"hdl-styling.default-language"=vhdl -"hdl-styling.alignment"=1 -"hdl-styling.auto-fit"=1
      #create a new block
      - run: legohdl new library1.Lab0
      #view blocks
      - run: legohdl list
      #link this vendor to the active-workspace
      - run: legohdl config -"vendor.uf-ece"=https://github.com/uf-eel4712c/uf-ece.git -"workspace.primary.vendors+=uf-ece"
      #view blocks
      - run: legohdl list
      #view vendors
      - run: legohdl list -vendor
      #install the available block from uf-ece vendor
      - run: legohdl install sample.mux_2x1
      #collect data about mux_2x1 block and its design entity
      - run: legohdl info uf-ece.sample.mux_2x1 -more
      - run: legohdl get sample.mux_2x1:mux_2x1 -inst
      - run: legohdl get mux_2x1 -arch
<gh_stars>0
event_unit_flex:
  incdirs: [
    .,
  ]
  files: [
    event_unit_core.sv,
    event_unit_interface_mux.sv,
    event_unit_top.sv,
    soc_periph_fifo.sv,
    interc_sw_evt_trig.sv,
    hw_barrier_unit.sv,
    hw_mutex_unit.sv,
    hw_dispatch.sv
  ]
<reponame>BearerPipelineTest/google-ctf<gh_stars>1000+
# Human readable task name
name: story

# Long form description.
description: |+
  Please, tell me a beautiful story.

# The flag
flag: CTF{eb64749d08bd99b681f2bc75aa65eab35a80310f7426f6872ba869d244e37135}

# Task category. (one of hardware, crypto, pwn, reversing, web, net, misc)
category: crypto

# === the fields below will be filled by SRE or automation ===

# Task label
label: ''

# URL for web challenges
link: ''

# host/port for non-web challenges
host: 'story.2021.ctfcompetition.com 1337'

# the URL for attachments, to be filled in by automation
attachment: ''

# is this challenge released? Will be set by SREs
visible: false
<filename>.travis.yml
language: scala
# https://docs.travis-ci.com/user/notifications/#IRC-notification
notifications:
  irc:
    channels:
      - "chat.freenode.net#lichess"
    on_success: change
    on_failure: always
    use_notice: true
    skip_join: true
  slack:
    rooms:
      - lichess:sVTqlE0OQNMPq1n6qRnVnfrz
    on_success: change
    on_failure: always
  email: false
# https://docs.travis-ci.com/user/languages/java/#Testing-Against-Multiple-JDKs
jdk:
  - oraclejdk8
env:
  - TRAVIS_NODE_VERSION="4.0.0"
install:
  # http://austinpray.com/ops/2015/09/20/change-travis-node-version.html
  - rm -rf ~/.nvm && git clone https://github.com/creationix/nvm.git ~/.nvm && (cd ~/.nvm && git checkout `git describe --abbrev=0 --tags`) && source ~/.nvm/nvm.sh && nvm install "$TRAVIS_NODE_VERSION"
  - npm install -g gulp
  - git submodule update --init --recursive
  - ./ui/build
  - ./bin/build-deps.sh
script:
  - sbt compile
  - sbt test
<filename>.github/workflows/shortIntegrationTests.yml<gh_stars>10-100
name: Short integration tests

# Run the integration tests on one configuration (of the nightly matrix) on each
# push to main. Should catch 95% of integration test breakages. Useful for
# identifying the particular offending commit and emailing the commit author.

# Note: currently the integration tests don't take that long to run (<1 min).
# If, in the future, they take significantly longer (>~1 hour), we should
# reconsider this.

on:
  workflow_dispatch:
  push:
    branches:
      - main
  pull_request:
    types: [opened, synchronize, reopened]

jobs:
  # Build CIRCT and run its tests using a Docker container with all the
  # integration testing prerequisite installed.
  build-circt:
    name: Build and Test
    runs-on: ubuntu-18.04
    container:
      image: ghcr.io/circt/images/circt-integration-test:v10.2
    strategy:
      # Keep the 'matrix' strategy with one data point to make it obvious that
      # this is one point in the overall matrix.
      matrix:
        build-assert: [ON]
        build-shared: [ON]
        build-type: [Release]
        compiler:
          - cc: clang
            cxx: clang++

    steps:
      # Clone the CIRCT repo and its submodules. Do shallow clone to save clone
      # time.
      - name: Get CIRCT
        uses: actions/checkout@v2
        with:
          fetch-depth: 1
          submodules: true

      - name: ccache
        uses: hendrikmuhs/ccache-action@v1
        with:
          max-size: 1G

      # --------
      # Build and test CIRCT
      # --------

      - name: Configure CIRCT
        env:
          CC: ${{ matrix.compiler.cc }}
          CXX: ${{ matrix.compiler.cxx }}
          BUILD_ASSERT: ${{ matrix.build-assert }}
          BUILD_SHARED: ${{ matrix.build-shared }}
          BUILD_TYPE: ${{ matrix.build-type }}
        run: |
          export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH"
          mkdir build && cd build
          cmake -GNinja ../llvm/llvm \
            -DBUILD_SHARED_LIBS=$BUILD_SHARED \
            -DCMAKE_BUILD_TYPE=$BUILD_TYPE \
            -DCMAKE_C_COMPILER=$CC \
            -DCMAKE_CXX_COMPILER=$CXX \
            -DLLVM_CCACHE_BUILD=ON \
            -DLLVM_ENABLE_ASSERTIONS=$BUILD_ASSERT \
            -DLLVM_ENABLE_PROJECTS=mlir \
            -DLLVM_EXTERNAL_PROJECTS=circt \
            -DLLVM_EXTERNAL_CIRCT_SOURCE_DIR=.. \
            -DLLVM_USE_LINKER=lld \
            -DMLIR_ENABLE_BINDINGS_PYTHON=ON \
            -DCIRCT_BINDINGS_PYTHON_ENABLED=ON
      - name: Test CIRCT
        run: |
          ninja -C build check-circt -j$(nproc)
      - name: Integration Test CIRCT
        run: |
          ninja -C build check-circt-integration -j$(nproc)
      - name: Ccache stats
        run: |
          ccache -s
name: testbench
commands:
  - python ../../../tests/test_timing/generate_testbench.py outputs --width $array_width --height $array_height
  - mv outputs/Interconnect_tb.sv outputs/testbench.sv
outputs:
  - testbench.sv
version: 2
jobs:
  fedora_bmake:
    working_directory: ~/universal-ctags
    docker:
      - image: docker.io/fedora:latest
    steps:
      - run:
          name: Install Git
          command: |
            dnf -y install git || :
      - checkout
      - run:
          name: Install build tools
          command: |
            dnf -y install gcc automake autoconf pkgconfig bmake aspell-devel aspell-en libxml2-devel jansson-devel libyaml-devel findutils || :
      - run:
          name: Build
          command: |
            bash ./autogen.sh
            MAKE=bmake ./configure --enable-debugging
            bmake -j 2
      - run:
          name: Test
          command: |
            MAKE=bmake bmake check roundtrip CIRCLECI=1
  centos_make:
    working_directory: ~/universal-ctags
    docker:
      - image: docker.io/centos:latest
    steps:
      - run:
          name: Install Git
          command: |
            yum -y install git || :
      - checkout
      - run:
          name: Install build tools
          # TODO: enable spell checker
          command: |
            yum -y install gcc automake autoconf pkgconfig make libxml2-devel jansson-devel libyaml-devel findutils || :
      - run:
          name: Build
          command: |
            bash ./autogen.sh
            ./configure --enable-debugging
            make -j 2
      - run:
          name: Test
          command: |
            make check roundtrip CIRCLECI=1
workflows:
  version: 2
  build_and_test:
    jobs:
      - fedora_bmake
      - centos_make
<reponame>BearerPipelineTest/google-ctf<filename>2021/quals/web-security-driven/challenge.yaml
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
apiVersion: kctf.dev/v1
kind: Challenge
metadata:
  name: secdriven
spec:
  deployed: true
  powDifficultySeconds: 0
  network:
    public: true
    ports:
      - protocol: "TCP"
        targetPort: 443
  healthcheck:
    enabled: true
  allowConnectTo:
    - secdriven-bot
  podTemplate:
    template:
      spec:
        containers:
          - name: challenge
            env:
              - name: DB_PORT
                value: "3306"
              - name: DB_HOST
                value: "10.59.0.2"
              - name: DB_DATABASE
                value: secdriven
              - name: DB_USER
                value: secdriven
              - name: DB_PASSWORD
                value: ""
              - name: ADMIN_USERNAME
                value: "4dm1n_1731"
              - name: ADMIN_PASSWORD
                value: ""
              - name: DB_CONNECTION_LIMIT
                value: "100"
              - name: APP_SECRET
                value: ""
              - name: APP_DOMAIN
                value: chall.secdriven.dev
              - name: APP_2DOMAIN
                value: secdrivencontent.dev
              - name: APP_MAX_FILE_SIZE
                value: "500"
              - name: UPLOAD_PATH
                value: "/mnt/disks/uploads"
              - name: LISTEN_PORT
                value: "443"
            volumeMounts:
              - name: uploads
                mountPath: /mnt/disks/uploads
        volumes:
          - name: uploads
            persistentVolumeClaim:
              claimName: secdriven-uploads
  persistentVolumeClaims:
    - secdriven-uploads
---
apiVersion: networking.k8s.io/v1
kind: NetworkPolicy
metadata:
  name: secdriven-sql-access
  namespace: default
spec:
  podSelector:
    matchLabels:
      app: secdriven
  policyTypes:
    - Egress
  egress:
    - to:
        - ipBlock:
            cidr: 10.59.0.2/32
<gh_stars>10-100
name: Continuous Build

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]
  schedule:
    - cron: "0 5 * * *" # run at 5 AM UTC

jobs:
  build-linux:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          submodules: "recursive"
      - name: Install dependencies
        run: |
          sudo apt-get update
          sudo apt-get install --fix-missing libsdl2-dev libsdl2-image-dev
      - name: Setup Zig
        uses: goto-bus-stop/setup-zig@v1
        with:
          version: master
      - name: Build all software
        run: |
          mkdir -p zig-out/bin
          mkdir -p zig-out/firmware
          zig build install
      - name: Run Testsuite
        run: zig build test
      - name: Build firmware
        run: zig build firmware
sudo: required
dist: trusty
language: python
python:
  - "2.7"
  - "3.4"
branches:
  only:
    - master
before_install:
  - sudo apt-get -qq update
  # Icarus Verilog simulator
  - sudo apt-get install -qq -y iverilog
  # GHDL (0.33 or greater is required, not available in repository).
  # Install GHDL dependencies
  - sudo apt-get install -qq libgnat-4.8
  - sudo apt-get install -qq libstdc++6
  # Temporarily pull 0.33 .deb from sourceforge and install
  - wget http://netcologne.dl.sourceforge.net/project/ghdl-updates/Builds/ghdl-0.33/debian/ghdl_0.33-1ubuntu1_amd64.deb
  - sudo dpkg -i ghdl_0.33-1ubuntu1_amd64.deb
  # Nose code coverage
  - pip install -q nosexcover
install:
  - python setup.py install
script:
  - nosetests --with-doctest --with-xcoverage --with-xunit --cover-package=chiptools --cover-erase
<filename>.buildkite/pipeline_mflowgen.yml
agents: { jobsize: "hours" }

##############################################################################
# Use this to test a specific branch/commit:
#   Add to env:
#     NOV11: ee214ef77b827f969e4b5f056f5d866cf391be7a
#   Add to commands:
#     - pwd; git branch; git checkout $$NOV11
##############################################################################

# Note: "echo exit 13" prevents hang at genus/innovus prompt
env:
  TEST: 'echo exit 13 | mflowgen/test/test_module.sh'

steps:
  ##############################################################################
  # INDIVIDUAL TILE RUNS
  # Builds in dir e.g. mflowgen/full_chip/19-tile_array/16-Tile_MemCore
  - label: '250MHz PE synth 12m'
    commands:
      - $TEST --need_space 30G full_chip tile_array Tile_PE --steps synthesis --debug
      - .buildkite/pipelines/check_pe_area.sh
      - mflowgen/bin/buildcheck.sh full_chip/*tile_array/*Tile_PE --show-all-errors
  - wait: { continue_on_failure: true } # One step at a time + continue on failure

  - label: 'MemCore synth 17m'
    commands:
      - $TEST --need_space 30G full_chip tile_array Tile_MemCore --steps synthesis --debug
      - mflowgen/bin/buildcheck.sh full_chip/*tile_array/*Tile_MemCore --show-all-errors
  - wait: { continue_on_failure: true } # One step at a time + continue on failure
name: Deploy

on:
  push:
    tags:
      - v*

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python 3.8
        uses: actions/setup-python@v2
        with:
          python-version: 3.8
      - name: Install dependencies
        run: |
          sudo apt install -y verilator libgmp-dev libmpfr-dev libmpc-dev
          python -m pip install --upgrade pip
          pip install flake8 pytest pytest-cov pytest-pycodestyle fault
          pip install kratos  # test optional dependency
          pip install -e .
      - name: Test with pytest
        run: |
          py.test --cov magma -v --cov-report term-missing tests
          pycodestyle magma/
      - name: Install deploy packages
        shell: bash -l {0}
        run: |
          pip install twine
      - name: Upload to PyPI
        shell: bash -l {0}
        run: |
          source .travis/deploy.sh
        env:
          PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
package:
  name: axi2per

dependencies:
  axi_slice: { git: "https://github.com/pulp-platform/axi_slice.git", version: 1.1.3 }

sources:
  # Level 0
  - axi2per_req_channel.sv
  - axi2per_res_channel.sv
  # Level 1
  - axi2per.sv
DesyTrackerRoot:
  enable: 'True'
  ForceWrite: 'False'
  DesyTrackerRunControl:
    CalMeanCount: 500
    CalDacMin: 200
    CalDacMax: 255
    CalDacStep: 5
    CalDacCount: 5
    CalChanMin: 000
    CalChanMax: 1023
  DesyTracker:
    enable: 'True'
    AxiVersion:
      enable: 'True'
      ScratchPad: '0x000000'
    KpixDaqCore:
      enable: 'True'
      SysConfig:
        enable: 'True'
        RawDataMode: 'False'
        AutoReadDisable: 'False'
        # KpixEnable: '0x1'
      KpixClockGen:
        enable: 'True'
        ClkSelReadout: 19
        ClkSelDigitize: 19
        ClkSelAcquire: 31
        ClkSelIdle: 19
        ClkSelPrecharge: 599
        SampleDelay: 0
        SampleEdge: Fall
      AcquisitionControl:
        enable: 'True'
        ExtTrigSrc: Disabled
        ExtTimestampSrc: Disabled
        ExtAcquisitionSrc: EthAcquire
        ExtStartSrc: EthStart
        Calibrate: 'True'
      KpixAsicArray:
        enable: 'True'
        KpixAsic[*]:
          enable: 'False'
          CfgAutoReadDisable: 'False'
          CfgForceTemp: 'False'
          CfgDisableTemp: 'False'
          CfgAutoStatusReadEn: 'True'
          TimeResetOn: 11
          TimeResetOff: 900
          TimeOffsetNullOff: 1000
          TimeLeakageNullOff: 5
          TimeDeselDelay: 18
          TimeBunchClkDelay: 3000
          TimeDigitizeDelay: 8
          TimePowerUpOn: 11
          TimePowerUpDigOn: 11
          TimeThreshOff: 2600
          TrigInhibitOff: 500
          BunchClockCount: 1812
          Cal0Delay: 750
          Cal1Delay: 200
          Cal2Delay: 200
          Cal3Delay: 200
          CalCount: '4'
          DacRampThresh: 240
          DacRangeThreshold: 0
          DacCalibration: 255
          DacEventThreshold: 80
          DacShaperBias: 120
          DacDefaultAnalog: 189
          DacThresholdA: 240
          DacThresholdB: 0
          CntrlDisPerReset: 'True'
          CntrlEnDcReset: 'True'
          CntrlHighGain: 'False'
          CntrlNearNeighbor: 'False'
          CntrlCalSource: 'Internal'
          CntrlForceTrigSource: 'Internal'
          CntrlHoldTime: 64x
          CntrlCalibHigh: 'False'
          CntrlShortIntEn: 'False'
          CntrlForceLowGain: 'False'
          CntrlLeakNullDisable: 'True'
          CntrlPolarity: Positive
          CntrlTrigDisable: 'True'
          CntrlDisPwrCycle: 'False'
          CntrlFeCurr: 31uA
          CntrlDiffTime: Half
          CntrlMonSource: None
          Chan_0_31: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_32_63: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_64_95: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_96_127: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_128_159: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_160_191: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_192_223: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_224_255: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_256_287: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_288_319: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_320_351: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_352_383: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_384_415: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_416_447: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_448_479: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_480_511: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_512_543: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_544_575: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_576_607: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_608_639: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_640_671: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_672_703: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_704_735: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_736_767: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_768_799: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_800_831: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_832_863: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_864_895: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_896_927: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_928_959: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_960_991: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
          Chan_992_1023: DDDDDDDD DDDDDDDD DDDDDDDD DDDDDDDD
        KpixAsic[0]:
          enable: 'True'
        KpixAsic[1]:
          enable: 'False'
        KpixAsic[24]:
          enable: 'True'
      KpixDataRxArray:
        enable: 'True'
        KpixDataRx[*]:
          enable: 'True'
name: MacOS Wheel Deploy

on:
  push:
    tags:
      - '*'

jobs:
  build:
    runs-on: macos-latest
    strategy:
      matrix:
        python-version: [3.6, 3.7, 3.8, 3.9, '3.10']
    steps:
      - uses: actions/checkout@v2
      - name: Checkout submodules
        shell: bash
        run: |
          auth_header="$(git config --local --get http.https://github.com/.extraheader)"
          git submodule sync --recursive
          git -c "http.extraheader=$auth_header" -c protocol.version=2 submodule update --init --force --recursive --depth=1
      - name: Setup Env
        uses: conda-incubator/setup-miniconda@v2
        with:
          auto-update-conda: true
          python-version: ${{ matrix.python-version }}
      - name: Install verilator
        shell: bash
        run: |
          brew install verilator
          verilator --version
      - name: Install Python packages
        shell: bash -l {0}
        run: |
          pip install scikit-build
          pip install cmake twine wheel pytest
      - name: Build and run wheel
        shell: bash -l {0}
        env:
          OS: osx
          PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
          # miniconda uses 10.9
          MACOSX_DEPLOYMENT_TARGET: 10.9
        run: |
          source ./scripts/ci.sh
      - name: Upload to PyPI
        shell: bash -l {0}
        run: twine upload --skip-existing dist/*.whl
<filename>.travis.yml<gh_stars>1-10
language: java
dist: trusty
jdk:
  - oraclejdk8
before_install:
  - chmod +x gradlew
  - sudo apt-get -qq update
  - sudo apt-get install -y libgnat-4.6
  - chmod +x travis/ghdl-install.sh
  - travis/ghdl-install.sh
after_success:
  - ./gradlew jacocoTestReport
  - bash <(curl -s https://codecov.io/bash)
language: python
dist: xenial
python: "3.6"
install:
  # Get Migen / LiteX / Cores
  - wget https://raw.githubusercontent.com/enjoy-digital/litex/master/litex_setup.py
  - python3 litex_setup.py init install
script: python setup.py test
<gh_stars>10-100
name: CI Test

on: [push]

jobs:
  build:
    runs-on: ubuntu-latest
    if: "!contains(github.event.head_commit.message, 'skip ci')"
    steps:
      - uses: actions/checkout@v2
      - name: Checkout submodules
        shell: bash
        run: |
          auth_header="$(git config --local --get http.https://github.com/.extraheader)"
          git submodule sync --recursive
          git -c "http.extraheader=$auth_header" -c protocol.version=2 submodule update --init --force --recursive --depth=1
      - name: Pull and run docker 🐋
        shell: bash
        run: |
          docker run -it -d --name lake --mount type=bind,source="$(pwd)"/../lake,target=/lake keyiz/kratos-full bash
      - name: Install deps 🛠️
        shell: bash
        run: |
          docker exec -i lake bash -c 'apt update && apt install -y libgmp-dev libmpfr-dev libmpc-dev python3-dev'
          docker exec -i lake bash -c 'python3 -m pip install setuptools wheel'
          docker exec -i lake bash -c 'python3 -m pip install pytest fault pytest-codestyle pycodestyle'
          docker exec -i lake bash -c 'cd lake && python3 -m pip install -e .'
      - name: Run tests ⚙️
        shell: bash
        run: |
          docker exec -i lake bash -c 'lake/.github/scripts/run.sh'
<filename>.pep8speaks.yml
scanner:
  diff_only: True  # If False, the entire file touched by the Pull Request is scanned for errors. If True, only the diff is scanned.
  linter: flake8  # Other option is pycodestyle

no_blank_comment: False  # If True, no comment is made on PR without any errors.
descending_issues_order: True  # If True, PEP 8 issues in message will be displayed in descending order of line numbers in the file

pycodestyle:  # Same as scanner.linter value. Other option is flake8
  max-line-length: 110  # Default is 79 in PEP 8

flake8:
  max-line-length: 90  # Default is 79 in PEP 8
  ignore:  # Errors and warnings to ignore
    - E266
    - E731
    - E203
    - E221
    - W503
    - F401
    - E402

exclude:
  - tests
  - chiptools/testing
<reponame>slaclab/kpix
DesyTrackerRoot:
  enable: 'True'
  ForceWrite: 'False'
  DesyTrackerRunControl:
    runRate: 'Auto'
#  DataWriter:
#    enable: 'True'
#    dataFile: ''
#    open: 'False'
#    bufferSize: '0'
#    maxFileSize: '0'
  DesyTracker:
    enable: 'True'
    AxiVersion:
      enable: 'True'
      ScratchPad: '0xDEADBEEF'
    EnvironmentMonitor:
      enable: 'True'
      Si7006[0]:
        enable: 'False'
    KpixDaqCore:
      enable: 'True'
      SysConfig:
        enable: 'True'
        RawDataMode: 'False'
        AutoReadDisable: 'False'
      KpixClockGen:
        enable: 'True'
        ClkSelReadout: '15'
        ClkSelDigitize: '15'
        ClkSelAcquire: '32'
        ClkSelIdle: '15'
        ClkSelPrecharge: '599'
      AcquisitionControl:
        enable: 'True'
        ExtTrigSrc: Disabled
        ExtTimestampSrc: Disabled
        ExtAcquisitionSrc: EthAcquire
        ExtStartSrc: EthStart
        Calibrate: 'False'
      KpixAsicArray:
        enable: 'True'
        KpixAsic[*]:
          enable: 'False'
          CfgAutoReadDisable: 'False'
          CfgForceTemp: 'False'
          CfgDisableTemp: 'False'
          CfgAutoStatusReadEn: 'True'
          TimeResetOn: 11
          TimeResetOff: 900
          TimeOffsetNullOff: 1000
          TimeLeakageNullOff: 1
          TimeDeselDelay: 18
          TimeBunchClkDelay: 3000
          TimeDigitizeDelay: 0x1
          TimePowerUpOn: 11
          TimePowerUpDigOn: 11
          TimeThreshOff: 2600
          TrigInhibitOff: 1000
          BunchClockCount: 7000
          Cal0Delay: 750
          Cal1Delay: 50
          Cal2Delay: 50
          Cal3Delay: 50
          CalCount: 1
          DacRampThresh: 240
          DacRangeThreshold: 0
          DacCalibration: 245
          DacEventThreshold: 80
          DacShaperBias: 120
          DacDefaultAnalog: 189
          DacThresholdA: 200
          DacThresholdB: 0
          CntrlDisPerReset: 'True'
          CntrlEnDcReset: 'True'
          CntrlHighGain: 'True'
          CntrlNearNeighbor: 'False'
          CntrlCalSource: 'Disable'
          CntrlForceTrigSource: 'Disable'
          CntrlHoldTime: 64x
          CntrlCalibHigh: 'False'
          CntrlShortIntEn: 'False'
          CntrlForceLowGain: 'False'
          CntrlLeakNullDisable: 'True'
          CntrlPolarity: Positive
          CntrlTrigDisable: 'False'
          CntrlDisPwrCycle: 'False'
          CntrlFeCurr: 31uA
          CntrlDiffTime: Half
          CntrlMonSource: None
          Chan_0_31: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_32_63: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_64_95: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_96_127: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_128_159: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_160_191: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_192_223: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_224_255: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_256_287: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_288_319: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_320_351: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_352_383: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_384_415: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_416_447: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_448_479: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_480_511: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_512_543: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_544_575: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_576_607: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_608_639: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_640_671: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_672_703: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_704_735: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_736_767: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_768_799: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_800_831: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_832_863: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_864_895: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_896_927: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_928_959: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_960_991: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_992_1023: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
        KpixAsic[0]:
          enable: 'True'
        KpixAsic[1]:
          enable: 'True'
        KpixAsic[24]:
          enable: 'True'
      KpixDataRxArray:
        enable: 'True'
        KpixDataRx[*]:
          enable: 'True'
# This is a basic workflow to help you get started with Actions
name: Generate Bitstream

# Controls when the action will run. Triggers the workflow on push or pull request
# events but only for the main branch
on:
  repository_dispatch:
    types: [test-pass]

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains a single job called "build"
  build:
    # The type of runner that the job will run on
    runs-on: ubuntu-latest

    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v2
        with:
          ref: ${{ github.event.client_payload.ref }}
          path: main
      - run: echo ${{ github.event.client_payload.sha }}
      # Runs a single command using the runners shell
      - name: Clone symbiflow-arch-defs
        uses: actions/[email protected]
        with:
          repository: SymbiFlow/symbiflow-arch-defs
          path: symbiflow
      - name: Set up symbiflow-arch-defs
        run: |
          echo $(ls)
          echo $(pwd)
          cd symbiflow
          sudo make env
          sudo make all_xc7
      - name: Upload a xc7 Artifact
        uses: actions/[email protected]
        with:
          name: xc7
GitBase: ..
TopRoguePackage: pgp_pcie_apps
RoguePackages:
  - submodules/axi-pcie-core/python
  - submodules/surf/python
  - ../software/python
RogueConfig:
RogueScripts:
  - ../software/scripts/LoopbackTesting.py
  - ../software/scripts/PgpTesting.py
  - ../software/scripts/PrbsTesting.py

Targets:
  ################################################################################
  SlacPgpCardG3Pgp2b:
    ImageDir: targets/SlacPgpCardG3/SlacPgpCardG3Pgp2b/images
    Extensions:
      - mcs
  SlacPgpCardG3Pgp4_6Gbps:
    ImageDir: targets/SlacPgpCardG3/SlacPgpCardG3Pgp4_6Gbps/images
    Extensions:
      - mcs
  ################################################################################
  SlacPgpCardG4Pgp2b:
    ImageDir: targets/SlacPgpCardG4/SlacPgpCardG4Pgp2b/images
    Extensions:
      - mcs
  SlacPgpCardG4Pgp4_6Gbps:
    ImageDir: targets/SlacPgpCardG4/SlacPgpCardG4Pgp4_6Gbps/images
    Extensions:
      - mcs
  SlacPgpCardG4Pgp4_10Gbps:
    ImageDir: targets/SlacPgpCardG4/SlacPgpCardG4Pgp4_10Gbps/images
    Extensions:
      - mcs
  ################################################################################
  XilinxAlveoU200Pgp2b:
    ImageDir: targets/XilinxAlveoU200/XilinxAlveoU200Pgp2b/images
    Extensions:
      - mcs
  XilinxAlveoU200Pgp4_6Gbps:
    ImageDir: targets/XilinxAlveoU200/XilinxAlveoU200Pgp4_6Gbps/images
    Extensions:
      - mcs
  XilinxAlveoU200Pgp4_10Gbps:
    ImageDir: targets/XilinxAlveoU200/XilinxAlveoU200Pgp4_10Gbps/images
    Extensions:
      - mcs
  XilinxAlveoU200Htsp_100Gbps:
    ImageDir: targets/XilinxAlveoU200/XilinxAlveoU200Htsp_100Gbps/images
    Extensions:
      - mcs
  ################################################################################
  XilinxKcu1500Pgp2b:
    ImageDir: targets/XilinxKcu1500/XilinxKcu1500Pgp2b/images
    Extensions:
      - mcs
  XilinxKcu1500Pgp4_6Gbps:
    ImageDir: targets/XilinxKcu1500/XilinxKcu1500Pgp4_6Gbps/images
    Extensions:
      - mcs
  XilinxKcu1500Pgp4_10Gbps:
    ImageDir: targets/XilinxKcu1500/XilinxKcu1500Pgp4_10Gbps/images
    Extensions:
      - mcs
  ################################################################################

Releases:
  pgp_pcie_apps:
    Primary: True
    Targets:
      #######################################
      - SlacPgpCardG3Pgp2b
      - SlacPgpCardG3Pgp4_6Gbps
      #######################################
      - SlacPgpCardG4Pgp2b
      - SlacPgpCardG4Pgp4_6Gbps
      - SlacPgpCardG4Pgp4_10Gbps
      #######################################
      - XilinxAlveoU200Pgp2b
      - XilinxAlveoU200Pgp4_6Gbps
      - XilinxAlveoU200Pgp4_10Gbps
      - XilinxAlveoU200Htsp_100Gbps
      #######################################
      - XilinxKcu1500Pgp2b
      - XilinxKcu1500Pgp4_6Gbps
      - XilinxKcu1500Pgp4_10Gbps
      #######################################
    Types:
      - FW_only
<filename>src/main/docker/gitlab-jenkins.yml
version: "2"
services:
  gitlab:
    image: gitlab/gitlab-ce:latest
    volumes:
      - artemis-gitlab-data:/var/opt/gitlab
      - artemis-gitlab-logs:/var/log/gitlab
      - artemis-gitlab-config:/etc/gitlab
      - ./gitlab/gitlab-setup.sh:/gitlab-setup.sh
    ports:
      - "2222:22"
      - "8082:80"
    mem_limit: 3000m
    hostname: gitlab
    networks:
      - artemis
  jenkins:
    build: jenkins
    volumes:
      - artemis-jenkins-data:/var/jenkins_home
      - /var/run/docker.sock:/var/run/docker.sock
    ports:
      - "8083:8080"
    hostname: jenkins
    networks:
      - artemis
networks:
  artemis:
    driver: "bridge"
volumes:
  artemis-gitlab-data:
  artemis-gitlab-logs:
  artemis-gitlab-config:
  artemis-jenkins-data:
<gh_stars>0
files:
  - common/source/clk_div_n.vhd
  - common/sim/clk_div_n_long_code_tb.vhd
  - common/sim/clk_div_n_stimulus_array_tb.vhd
  - common/sim/clk_div_n_stimulus_file_tb.vhd

tests:
  - clk_div_n_long_code_tb
  - clk_div_n_stimulus_array_tb
  - clk_div_n_stimulus_file_tb
<reponame>f110/wing
apiVersion: minio.f110.dev/v1alpha1
kind: MinIOUser
metadata:
  name: test
spec:
  selector:
    matchLabels:
      app: minio
  path: /object-storage/test
<reponame>makaimann/fault<gh_stars>10-100
name: Linux Test

on: [push]

jobs:
  build:
    runs-on: ubuntu-latest
    if: "!contains(github.event.head_commit.message, 'skip ci')"
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: '3.8'
      - name: Install dependencies
        shell: bash
        run: |
          sudo apt install -y verilator libgmp-dev libmpfr-dev libmpc-dev iverilog
          verilator --version
      - name: Install Python packages
        shell: bash -l {0}
        run: |
          pip install "pytest<6"
          pip install pytest-cov pytest-pycodestyle
          pip install mantle>=2.0.0  # for tests.common
          pip install vcdvcd decorator kratos
          pip install smt-switch pono
          pip install .
      - name: Pytest
        shell: bash -l {0}
        run: |
          export LD_LIBRARY_PATH=/usr/lib:$LD_LIBRARY_PATH
          pytest --pycodestyle --cov-report=xml --cov=fault tests/ -v -r s
      - name: Coverage
        shell: bash -l {0}
        run: |
          bash <(curl -s https://codecov.io/bash)
<gh_stars>10-100
---
title: The SPU Mark II Instruction Set Architecture
author:
  - Felix "xq" Queißner
date: April 28, 2020
abstract: This is the documentation for the SPU Mark II instruction set architecture. It is a stack based 16 bit processor that features a highly configurable instruction set.
<gh_stars>10-100
description: >-
  Final version of data and analysis scripts for sigcomm submission
experiment: fec
repositories:
  P4Boosters: c1fd10db
  TofinoP4Boosters: 1d83cb15853
files:
  data: iperf_output/
  config.yml: execution/cfgs/iperf.yml
  analysis.ipynb: analysis/fecAnalysis.ipynb
  run_script.sh: execution/run_iperf.sh
  documentation.md: fec.md
  throughput: analysis/tcp_throughput.pdf
  Encoder.tar.gz: ../bitstreams/RSEEncoder1Port.tar.gz
  Decoder.tar.gz: ../bitstreams/RSEDecoder1Port.tar.gz
dist: xenial
language: python
python:
  - 3.6.9
  - '3.7'
install:
  - pip install -r requirements.txt
script:
  - "/bin/bash -c ci/run.sh"
deploy:
  provider: pypi
  user: __token__
  password:
    secure: <KEY>
  distributions: sdist bdist_wheel
  skip_existing: true
  skip_cleanup: true
  on:
    tags: true
branches:
  only:
    - master
name: minerva32
version: 0.1.0.0
#github: "hesiod/minerva32"
license: Apache-2.0
author: "<NAME>"
maintainer: "<EMAIL>"
copyright: "Tobias Markus 2018"

extra-source-files:
  - README.md

synopsis: RV32I CPU built on Clash
category: Hardware

# To avoid duplicated efforts in documentation and dealing with the
# complications of embedding Haddock markup inside cabal files, it is
# common to point users to the README.md file.
#description: Please see the README on GitHub at <https://github.com/hesiod/rv32#readme>

dependencies:
  - base >= 4.7 && < 5

library:
  source-dirs: hdl
  dependencies:
    - clash-prelude
    - data-default-class
    - ghc-typelits-natnormalise
    - ghc-typelits-knownnat
    - ghc-typelits-extra
    - deepseq
    - lens
    - interpolate
  exposed-modules:
    - Types
    - Stage.InstrFetch
    - Stage.InstrDecode
    - Stage.Execute
    - Stage.Writeback
    - Primitives.PLL
    - Primitives.DSP
    - Top

tests:
  goldy:
    main: Test.hs
    source-dirs:
      - test
    dependencies:
      - minerva32
      - clash-prelude
      - tasty
      - tasty-golden
      - binary
      - bytestring
<reponame>DanielTRYTRYLOOK/RDF-2020
name: aes_cipher_top
clock_port: clk
verilog:
  - aes_cipher_top.v
  - aes_inv_cipher_top.v
  - aes_inv_sbox.v
  - aes_key_expand_128.v
  - aes_rcon.v
  - aes_sbox.v
  - timescale.v
- name: update apt cache
  apt: update_cache=yes

- name: Install desktop packages
  apt: name={{ item }} state=latest
  with_items:
    - ubuntu-desktop
    - virtualbox-guest-dkms
    - virtualbox-guest-utils
    - virtualbox-guest-x11
    - build-essential
    - autoconf

- name: Prepare node.js
  shell: 'curl -sL https://deb.nodesource.com/setup_8.x | sudo -E bash -'

- name: Add yarn pubkey
  shell: 'curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add -'

- name: Add yarn repo
  shell: 'echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list'

- name: Install toolchain packages
  apt: name={{ item }} state=latest
  with_items:
    - git
    - u-boot-tools
    - device-tree-compiler
    - build-essential
    - git
    - curl
    - ca-certificates
    - sudo
    - libxrender1
    - libxtst6
    - libxi6
    - lib32ncurses5
    - bc
    - u-boot-tools
    - device-tree-compiler
    - libncurses5-dev
    - libssl-dev
    - qemu-user-static
    - binfmt-support
    - dosfstools
    - parted
    - debootstrap
    - zerofree
    - lib32z1
    - lib32stdc++6
#    - schroot
#    - qemu qemu-user qemu-user-static
    - nodejs
    - yarn

- name: allow any user to startx
  replace: dest=/etc/X11/Xwrapper.config regexp='allowed_users=.*$' replace='allowed_users=anybody' backup=yes

- name: create Desktop directory in case it's not there yet
  file: path=/home/vagrant/Desktop state=directory mode=0755 owner=vagrant group=vagrant

- name: copy default .profile to auto startx
  copy: src=roles/common/files/.profile dest=/home/vagrant backup=yes force=yes

- name: create dir needed for autologin
  file: path=/etc/lightdm/lightdm.conf.d state=directory

- name: enable autologin
  copy: src=roles/common/files/50-myconfig.conf dest=/etc/lightdm/lightdm.conf.d/50-myconfig.conf backup=yes force=yes

- name: copy bashrc to have the proper PATH variable for the Xilinx tools
  copy: src=roles/common/files/.bashrc dest=/home/vagrant backup=yes force=yes

- name: Symlinking make to gmake
  file: src=make dest=/usr/bin/gmake state=link
  become: true
<filename>server/launch/sample_balanced2/configuration.yml
config:
  core_coordination_impl: redis
  debug_mode: true
  server_admin: <EMAIL>

hosts:
  main_machine:
    config_file: main_machine/machine_config.py
    runner: launch_sample_balanced2_machine.py
    processes:
      main_instance1:
        components:
          core:
            config_file: main_machine/main_instance1/core/server_config.py
            type: core
      main_instance2:
        components:
          core:
            config_file: main_machine/main_instance2/core/server_config.py
            type: core
      main_instance3:
        components:
          core:
            config_file: main_machine/main_instance3/core/server_config.py
            type: core
      lab_and_experiment:
        components:
          laboratory:
            config_file: main_machine/lab_and_experiment/laboratory/server_config.py
            type: laboratory
            protocols:
              port: 10029
          experiment:
            config_file: main_machine/lab_and_experiment/experiment/server_config.py
            type: experiment
            class: experiments.dummy.DummyExperiment
    host: 127.0.0.1
# This is a basic workflow to help you get started with Actions
name: Build

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  build:
    runs-on: ubuntu-latest
    container: verilator/verilator
    strategy:
      matrix:
        node-version: [16.x]
        # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
    steps:
      - uses: actions/checkout@v2
      - name: Setup cmake
        uses: jwlawson/[email protected]
        with:
          cmake-version: '3.16.x'
      - name: Use cmake
        run: cmake --version
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v2
        with:
          node-version: ${{ matrix.node-version }}
      - run: apt-get update && apt-get install --no-install-recommends -y zlib1g zlib1g-dev
      - run: cd sim && npm i && make
services:
  - docker

script:
  # Get images
  - docker pull ghdl/ghdl:buster-gcc-8.3.0
  - docker pull cdelledonne/msp430-gcc
  # Testing installations
  - docker run -t ghdl/ghdl:buster-gcc-8.3.0 ghdl --version
  - docker run -t cdelledonne/msp430-gcc msp430-elf-gcc -v
  # Run SW check (check if all example projects + bootloader are compiling)
  - >
    docker run -t -v `pwd`:/mnt/data cdelledonne/msp430-gcc
    /bin/bash -c "chmod u+x /mnt/data/travis_ci/sw_check.sh; /mnt/data/travis_ci/sw_check.sh"
  # Run HW check
  - >
    docker run -t -v `pwd`:/mnt/data ghdl/ghdl:buster-gcc-8.3.0
    /bin/bash -c "chmod u+x /mnt/data/travis_ci/hw_check.sh; /mnt/data/travis_ci/hw_check.sh"

notifications:
  email:
    on_success: never
    on_failure: always
<reponame>zephyr-cosim/zephyr-cosim
description: LiteX VexRiscV interrupt controller

compatible: "zephyr-cosim-intc"

include: [interrupt-controller.yaml, base.yaml]

properties:
#  riscv,max-priority:
#    type: int
#    description: maximum interrupt priority
#    required: true
  "#interrupt-cells":
    const: 2

interrupt-cells:
  - irq
  - priority
package:
  name: riscv-dbg

sources:
  files:
    - src/dm_pkg.sv
    - debug_rom/debug_rom.sv
    - debug_rom/debug_rom_snd_scratch.sv
    - src/dm_csrs.sv
    - src/dm_mem.sv
    - src/dm_top.sv
    - src/dm_obi_top.sv
    - src/dmi_cdc.sv
    - src/dmi_jtag.sv
    - src/dmi_jtag_tap.sv
    - src/dm_sba.sv
apiVersion: v1
kind: Namespace
metadata:
  name: harbor
---
apiVersion: v1
kind: ConfigMap
metadata:
  name: harbor
  namespace: harbor
data:
  EXT_ENDPOINT: http://registry.f110.dev
---
apiVersion: v1
kind: Secret
metadata:
  name: harbor
  namespace: harbor
stringData:
  HARBOR_ADMIN_PASSWORD: <PASSWORD>
---
apiVersion: v1
kind: Service
metadata:
  name: harbor
  namespace: harbor
spec:
  selector:
    app.kubernetes.io/name: harbor
  ports:
    - port: 80
      protocol: TCP
<reponame>Steinegger/cv32e40p
package:
  name: riscv

dependencies:
  fpnew: { git: "https://github.com/pulp-platform/fpnew.git", version: 0.6.1 }
  tech_cells_generic: { git: "https://github.com/pulp-platform/tech_cells_generic.git", version: 0.1.1 }

sources:
  include_dirs:
    - rtl/include
  files:
    - rtl/include/apu_core_package.sv
    - rtl/include/riscv_defines.sv
    - rtl/include/riscv_ascon_defines.sv
    - rtl/include/riscv_tracer_defines.sv
    - rtl/register_file_test_wrap.sv
    - rtl/riscv_alu.sv
    - rtl/riscv_alu_div.sv
    - rtl/riscv_ascon.sv
    - rtl/riscv_compressed_decoder.sv
    - rtl/riscv_controller.sv
    - rtl/riscv_cs_registers.sv
    - rtl/riscv_decoder.sv
    - rtl/riscv_int_controller.sv
    - rtl/riscv_ex_stage.sv
    - rtl/riscv_hwloop_controller.sv
    - rtl/riscv_hwloop_regs.sv
    - rtl/riscv_id_stage.sv
    - rtl/riscv_if_stage.sv
    - rtl/riscv_load_store_unit.sv
    - rtl/riscv_mult.sv
    - rtl/riscv_pmp.sv
    - rtl/riscv_prefetch_buffer.sv
    - rtl/riscv_prefetch_L0_buffer.sv
    - rtl/riscv_core.sv
    - rtl/riscv_apu_disp.sv
    - rtl/riscv_fetch_fifo.sv
    - rtl/riscv_L0_buffer.sv
    - target: asic
      files:
        - rtl/riscv_register_file.sv
    - target: not(asic)
      files:
        - rtl/riscv_register_file.sv
    - target: rtl
      files:
        - rtl/riscv_tracer.sv
        - rtl/cv32e40p_sim_clock_gate.sv
<filename>mflowgen/common/garnet-requirements-check/configure.yml
name: garnet-requirements

commands:
  - $GARNET_HOME/bin/requirements_check.sh |& tee requirements_check.log
  - egrep '^\*\*\*ERROR' requirements_check.log && exit 13 || echo requirements check GOOD

postconditions:
  - assert '***ERROR' not in File( 'requirements_check.log' )
<reponame>danaagur/opentitan
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

# Azure template for downloading pipeline step outputs and unpacking them.
#
# This template will download all artifacts from upstream jobs (which are
# expected to use upload_artifacts_template.yml) and unpack them.
#
# This template expects that a variable $BUILD_ROOT is set to a writeable
# directory; the results will be available in $BIN_DIR. See
# util/build_consts.sh for more information.

steps:
  - task: DownloadPipelineArtifact@2
    inputs:
      buildType: current
      targetPath: '$(BUILD_ROOT)/downloads'
      # The first "path" segment is the name of the artifact.
      pattern: "*-build-bin/**"
    displayName: 'Download upstream outputs'

  - bash: |
      set -e
      test -n "$BUILD_ROOT"
      . util/build_consts.sh

      mkdir -p "$BIN_DIR"
      find "$BUILD_ROOT/downloads" \
        -name 'build-bin.tar' \
        -exec \
        tar -C "$BIN_DIR" \
          --strip-components=1 \
          --overwrite \
          -xvf {} \;
    displayName: 'Unpack upstream outputs'
%YAML 1.2
---
BirdEyeImgRatio: 0.00084 # Scale of the bird's-eye view of the road surface: size per pixel (m)
ExecutePeriod:
  wo_calculating: 0.001
  line_tracing: 0.005
  debagging: 0.100
Debug:
  imwrite_mode: 0
<reponame>diorga/snitch<filename>hw/vendor/pulp_platform_common_cells/.github/workflows/ci.yml
name: ci

on: [push]

jobs:
  build:
    # Job name is Greeting
    name: Verible Lint
    # This job runs on Linux
    runs-on: ubuntu-18.04
    env:
      VERIBLE_VERSION: v0.0-557-g287da29
    steps:
      - uses: actions/checkout@v2
      - name: Install Verible
        run: |
          set -e
          mkdir -p build/verible
          cd build/verible
          curl -Ls -o verible.tar.gz https://github.com/google/verible/releases/download/$VERIBLE_VERSION/verible-$VERIBLE_VERSION-Ubuntu-18.04-bionic-x86_64.tar.gz
          sudo mkdir -p /tools/verible && sudo chmod 777 /tools/verible
          tar -C /tools/verible -xf verible.tar.gz --strip-components=1
          echo "::add-path::/tools/verible/bin"
          echo "::add-matcher::.github/verible-lint-matcher.json"
      # Run linter in ip hw subdir
      - name: Run Lint
        run: |
          find src \
            -not \( -path src/deprecated -prune \) -name "*.sv" | \
            xargs verible-verilog-lint --waiver_files lint/common_cells.style.waiver --rules=-interface-name-style --lint_fatal
<reponame>mfkiwl/NEM-Relay-CGRA
dist: trusty
language: c
sudo: false

addons:
  apt:
    # sources:
    #   - ubuntu-toolchain-r-test
    packages:
      - csh
      - verilator
      # - build-essential

install:
  # Current working directory: /home/travis/build/akashlevy/CGRAGenerator
  - git clone https://github.com/akashlevy/TestBenchGenerator.git --branch master ../TestBenchGenerator
  # Install python for testing infrastructure
  # All this just to get python3 => python3.6 or better
  # Because TBG (and LT unit tests) need python 3.6 or better
  - wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
  - bash miniconda.sh -u -b -p $HOME/miniconda
  - export PATH="$HOME/miniconda/bin:$PATH"
  - hash -r
  - conda config --set always_yes yes --set changeps1 no
  - conda update -q conda
  - conda info -a
  # Needed by TestBenchGenerator
  - pip install delegator.py

script:
  # "You can sandwich the stuff in script with set -e and set +e."
  # set -e => exit on failure.
  - set -e

  ################################################################
  # Fifteen minutes for LT unit tests (background)
  # Install python dependencies
  - pip install pytest numpy delegator.py bit_vector==0.42a0
  - pip install git+git://github.com/akashlevy/pe.git
  # Run python based tests
  - cd $TRAVIS_BUILD_DIR/tests/test_pe
  # Set env variables for genesis (installed earlier)
  - export GENESIS_HOME=/tmp/Genesis2/Genesis2Tools
  - export PATH=$GENESIS_HOME/bin:$GENESIS_HOME/gui/bin:$PATH
  - export PERL5LIB=$GENESIS_HOME/PerlLibs/ExtrasForOldPerlDistributions:$PERL5LIB
  - echo 'Fifteen minutes for LT unit tests (background)'
  - ../../bin/generate.csh
  - pytest >& $TRAVIS_BUILD_DIR/pytest.log &

  ########################################################################
  - echo "Twenty-five minutes (1500s) for travis system tests"
  - cd $TRAVIS_BUILD_DIR/testdir; make SILENT=TRUE travis_system_tests MEMHEIGHT=short

  ########################################################################
  # Summarize results here at the end
  - cat $TRAVIS_BUILD_DIR/pytest.log
  - grep RESULT $TRAVIS_BUILD_DIR/testdir/serpent_results.tmp | grep -v echo
  # Did LT tests pass!!?? If not, print results and FAIL
  - grep FAILURES $TRAVIS_BUILD_DIR/pytest.log && cat $TRAVIS_BUILD_DIR/pytest.log || echo "Lenny unit tests PASSED"
  - grep FAILURES $TRAVIS_BUILD_DIR/pytest.log && exit 13 || echo okay
icache-intc:
  files: [
    DistributedArbitrationNetwork_Req_icache_intc.sv,
    DistributedArbitrationNetwork_Resp_icache_intc.sv,
    icache_intc.sv,
    Req_Arb_Node_icache_intc.sv,
    Resp_Arb_Node_icache_intc.sv,
    RoutingBlock_Req_icache_intc.sv,
    RoutingBlock_Resp_icache_intc.sv,
    lint_mux.sv,
    RoutingBlock_2ch_Req_icache_intc.sv,
  ]
  jg_slint_top_name: [ icache_intc ]
  jg_slint_elab_opt: [ ]
  jg_slint_postelab_cmds: [ ]
  jg_slint_clocks: [
    clk_i,
  ]
  jg_slint_resets: [
    ~rst_ni,
  ]
<reponame>mfkiwl/udma_hyper
package:
  name: udma_hyper
  authors:
    - "<NAME> <<EMAIL>>"

sources:
  - udma-hyperbus/src/cdc_fifo_gray_hyper.sv
  - udma-hyperbus/src/graycode_hyper.sv
  - udma-hyperbus/src/clock_diff_out.sv
  - udma-hyperbus/src/clk_gen_hyper.sv
  - udma-hyperbus/src/onehot_to_bin_hyper.sv
  - udma-hyperbus/src/ddr_out.sv
  - udma-hyperbus/src/hyperbus_delay_line.sv
  - udma-hyperbus/src/read_clk_rwds.sv
  - udma-hyperbus/src/hyperbus_phy.sv
  - udma-hyperbus/src/cmd_addr_gen.sv
  - udma-hyperbus/src/ddr_in.sv
  - udma-hyperbus/src/udma_hyper_reg_if_common.sv
  - udma-hyperbus/src/udma_hyper_reg_if_mulid.sv
  - udma-hyperbus/src/udma_rxbuffer.sv
  - udma-hyperbus/src/udma_txbuffer.sv
  - udma-hyperbus/src/udma_hyper_ctrl.sv
  - udma-hyperbus/src/udma_hyperbus_mulid.sv
  - udma-hyperbus/src/hyper_unpack.sv
  - udma-hyperbus/src/udma_cfg_outbuff.sv
  - udma-hyperbus/src/hyperbus_mux_generic.sv
  - udma-hyperbus/src/hyper_twd_trans_spliter.sv
  - udma-hyperbus/src/hyper_rr_flag_req.sv
  - udma-hyperbus/src/hyper_arbiter.sv
  - udma-hyperbus/src/hyper_arb_primitive.sv
  - udma-hyperbus/src/io_generic_fifo_hyper.sv
  - udma-hyperbus/src/udma_dc_fifo_hyper.sv
  - udma-hyperbus/src/dc_token_ring_fifo_din_hyper.v
  - udma-hyperbus/src/dc_token_ring_fifo_dout_hyper.v
  - udma-hyperbus/src/dc_token_ring_hyper.v
  - udma-hyperbus/src/dc_data_buffer_hyper.sv
  - udma-hyperbus/src/dc_full_detector_hyper.v
  - udma-hyperbus/src/dc_synchronizer_hyper.v
  - udma-hyperbus/src/udma_cmd_queue.sv
  - udma-hyperbus/src/udma_hyper_busy.sv
  - udma-hyperbus/src/udma_hyper_busy_phy.sv
  - udma-hyperbus/src/udma_hyper_top.sv
<gh_stars>1-10
pulp_cluster:
  vlog_opts: [
    -L fpnew_lib,
  ]
  incdirs: [
    ../../rtl/includes,
  ]
  files: [
    packages/pulp_cluster_package.sv,
    rtl/core_region.sv,
    rtl/core_demux.sv,
    rtl/cluster_interconnect_wrap.sv,
    rtl/tcdm_banks_wrap.sv,
    rtl/periph_demux.sv,
    rtl/per_demux_wrap.sv,
    rtl/periph_FIFO.sv,
    rtl/cluster_peripherals.sv,
    rtl/cluster_clock_gate.sv,
    rtl/cluster_timer_wrap.sv,
    rtl/cluster_event_map.sv,
    rtl/dmac_wrap.sv,
    rtl/hwpe_subsystem.sv,
    rtl/cluster_bus_wrap.sv,
    rtl/axi_slice_wrap.sv,
    rtl/axi2mem_wrap.sv,
    rtl/axi2per_wrap.sv,
    rtl/per2axi_wrap.sv,
    rtl/pulp_cluster.sv,
    rtl/lockstep_ctrl.sv,
    rtl/lockstep_ctrl_wrap.sv,
    rtl/lockstep_unit.sv,
    rtl/lockstep_unit_wrap.sv,
  ]
environment:
  matrix:
    - PYTHON: "C:\\Python27"
    - PYTHON: "C:\\Python33"
    - PYTHON: "C:\\Python34"
    - PYTHON: "C:\\Python35"
    - PYTHON: "C:\\Python27-x64"
    - PYTHON: "C:\\Python33-x64"
    - PYTHON: "C:\\Python34-x64"
    - PYTHON: "C:\\Python35-x64"

install:
  - "SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%"
  - "python setup.py install"

build: off

test_script:
  - "fusesoc init -y"
  - "fusesoc list-systems"
  - "fusesoc list-cores"
  - "fusesoc update"
<filename>ucode/code_dw.yml
#
# code_dw.yml
#
# Copyright (C) 2019-2021 ETH Zurich, University of Bologna and GreenWaves Technologies
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: <NAME> <<EMAIL>>
#

# mnemonics to simplify microcode writing
mnemonics:
  base_addr_W: 0
  base_addr_x: 1
  base_addr_y: 2
  base_addr_s: 3
  weights_km_iter: 4
  null5: 5
  weights_km_reset_iter: 6
  null7: 7
  infeat_km_iter: 8
  infeat_wom_iter: 9
  infeat_hom_iter: 10
  infeat_km_reset_iter: 11
  infeat_wom_reset_iter: 12
  infeat_hom_reset_iter: 13
  outfeat_wom_iter: 14
  outfeat_hom_iter: 15
  outfeat_km_iter: 16
  outfeat_wom_reset_iter: 17
  outfeat_hom_reset_iter: 18
  outfeat_km_reset_iter: 19
  scale_km_iter: 20
  zero: 21

# NE16 code
code:
  j_major:
    - { op : add, a: base_addr_x, b: infeat_wom_iter }
    - { op : add, a: base_addr_y, b: outfeat_wom_iter }
  i_major:
    - { op : add, a: base_addr_x, b: infeat_wom_reset_iter }  # infeat_wom_reset_iter = - subtile_nb_wo * infeat_wom_iter
    - { op : add, a: base_addr_y, b: outfeat_wom_reset_iter } # outfeat_wom_reset_iter = - subtile_nb_wo * outfeat_wom_iter
    - { op : add, a: base_addr_x, b: infeat_hom_iter }
    - { op : add, a: base_addr_y, b: outfeat_hom_iter }
  k_out_major:
    - { op : add, a: base_addr_x, b: infeat_hom_reset_iter }  # infeat_hom_reset_iter = - subtile_nb_ho * infeat_hom_iter
    - { op : add, a: base_addr_y, b: outfeat_hom_reset_iter } # outfeat_hom_reset_iter = - subtile_nb_ho * outfeat_hom_iter
    - { op : add, a: base_addr_x, b: infeat_wom_reset_iter }  # infeat_wom_reset_iter = - subtile_nb_wo * infeat_wom_iter
    - { op : add, a: base_addr_y, b: outfeat_wom_reset_iter } # outfeat_wom_reset_iter = - subtile_nb_wo * outfeat_wom_iter
    - { op : add, a: base_addr_W, b: weights_km_iter }
    - { op : add, a: base_addr_y, b: outfeat_km_iter }
    - { op : add, a: base_addr_x, b: infeat_km_iter }
    - { op : add, a: base_addr_s, b: scale_km_iter }
  fake_loop:
    - { op : mv, a: base_addr_x, b: base_addr_x }
    - { op : mv, a: base_addr_x, b: base_addr_x }
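# Worked example of the reset-iterator convention noted above (illustrative
# numbers only, not taken from any real NE16 configuration): with
# subtile_nb_wo = 4 and infeat_wom_iter = +16, the derived constant is
# infeat_wom_reset_iter = -4 * 16 = -64, so a single add rewinds base_addr_x
# back to the start of the row of subtiles before the next hom-step advance.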
<reponame>mfkiwl/yifive_r0
---
project:
  description: "A Custom SoC for Google sponsored Open MPW shuttles for SKY130."
  foundry: "SkyWater"
  git_url: "https://github.com/dineshannayya/yifive_r0.git"
  organization: "None"
  organization_url: "None"
  owner: "<NAME>"
  process: "SKY130"
  project_name: "YiFive"
  project_id: "00000000"
  tags:
    - "Open MPW"
    - "MPW-TWO"
  category: "Processor"
  top_level_netlist: "caravel/verilog/gl/caravel.v"
  user_level_netlist: "verilog/gl/user_project_wrapper.v"
  version: "1.00"
  cover_image: "docs/source/_static/YiFive_Soc.png"
init:
  - git submodule init
  - git submodule update --init --recursive

# reset is not yet supported
reset:
  - git submodule deinit -f .
---
category:
  - Environment variables
  - Initialization
  - Simple
note: Programming environment operations
<filename>examples/zedboard/picoblaze/config.yml
---
name: picoblaze
board: boards/zedboard
version: 0.1.1

cores:
  - fpga/cores/axi_ctl_register_v1_0
  - fpga/cores/axi_sts_register_v1_0
  - fpga/cores/dna_reader_v1_0
  - fpga/cores/kcpsm6_v1_0

memory:
  - name: control
    offset: '0x60000000'
    range: 4K
  - name: status
    offset: '0x50000000'
    range: 4K
  - name: picoram
    offset: '0x40000000'
    range: 4K

control_registers:
  - led
  - in_port
  - reset

status_registers:
  - out_port

parameters:
  fclk0: 50000000 # FPGA clock speed in Hz

xdc:
  - ./constraints.xdc

drivers:
  - server/drivers/common.hpp
  - ./picoblaze.hpp

web:
  - web/index.html
  - web/main.css
  - web/koheron.ts
version: 1.0.{build}

environment:
  matrix:
    - compiler: msbuild
      CONFIGURATION: Release
    - compiler: msys2
      ARCH: x64
      MSYS2_ARCH: x86_64
      MSYS2_DIR: msys64
      MSYSTEM: MINGW64
    - compiler: msys2
      ARCH: x86
      MSYS2_ARCH: i686
      MSYS2_DIR: msys64
      MSYSTEM: MINGW32
    - compiler: msvc
      ARCH: x64
    #- compiler: msvc
    #  ARCH: x86
    - compiler: mingw
    - compiler: cygwin

build_script:
  - '%APPVEYOR_BUILD_FOLDER%\win32\appveyor.bat build'

after_build:
  - '%APPVEYOR_BUILD_FOLDER%\win32\appveyor.bat package'

test_script:
  - '%APPVEYOR_BUILD_FOLDER%\win32\appveyor.bat test'

artifacts:
  - path: ctags-*.zip
<reponame>StanfordAHA/garnet
name: CI Test

on:
  # Every push
  push:
  # When requested manually
  workflow_dispatch:
  # Every morning at 3am -- how does it know which branch to run??
  schedule:
    - cron: '0 3 * * *'

jobs:
  build:
    runs-on: ubuntu-latest
    if: "!contains(github.event.head_commit.message, 'skip ci')"
    steps:
      - uses: actions/checkout@v2
      - name: Checkout submodules
        shell: bash
        run: |
          auth_header="$(git config --local --get http.https://github.com/.extraheader)"
          git submodule sync --recursive
          git -c "http.extraheader=$auth_header" -c protocol.version=2 submodule update --init --force --recursive --depth=1
      - name: Pull and run docker 🐋
        shell: bash
        run: |
          docker pull keyiz/garnet-flow
          docker run -d -it --name garnet keyiz/garnet-flow bash
          docker cp ../garnet garnet:/
      - name: Install deps 🛠️
        shell: bash
        run: |
          docker exec -i garnet bash -c "apt update"
          docker exec -i garnet bash -c "apt install -y default-jre"
          docker exec -i garnet bash -c "pip install -r /garnet/requirements.txt"
          docker exec -i garnet bash -c "pip install coveralls"
          docker exec -i garnet bash -c "pip install pytest"
          docker exec -i garnet bash -c "pip install pytest-cov pytest-pycodestyle z3-solver genesis2"
      - name: Run tests ⚙️
        shell: bash
        run: |
          docker exec -i garnet bash -c "/garnet/.github/scripts/run.sh"
      - name: Upload Coverage
        shell: bash
        # CODECOV_TOKEN secret stored at https://github.com/StanfordAHA/garnet/settings/secrets/actions
        run: |
          docker exec -i garnet bash -c "cd /garnet/ && bash <(curl -s https://codecov.io/bash) -t ${{ secrets.CODECOV_TOKEN }}"
<filename>Task/Palindrome-detection/00META.yaml
---
category:
  - Recursion
  - String manipulation
  - Classic CS problems and programs
  - Palindromes
note: Text processing
name: rnn-pytorch-1.4
channels:
  - pytorch
  - conda-forge
  - defaults
dependencies:
  - python=3.6
  - pytorch==1.4.0
  - torchvision==0.5.0
  - torchmetrics==0.5.0
  - Pandas
  - Keras
  - pybind11
  - ftfy
  - word2number
  - conllu 4.*
  - overrides 3.*
  - spacy 2.*
  - tqdm 4.48.*
  - docopt
  - transformers
  - toml
  - numpy
  - librosa
  - pysoundfile
  - prettytable
  - Unidecode==1.1.1
  - scikit-learn<0.22
  - inflect==4.1.0
  - sox
<gh_stars>1000+
# Human readable task name
name: PCIVault 1

# Long form description.
description: |+
  We plan to extend our cloud offering with a secure key-value store. Right
  now it is already a groundbreaking dedicated device that can store your
  data, but in the future we will provide military-grade encryption
  capability. You are one of the selected few that may test this offering
  in its alpha stage so that we can send you a quick, three hour survey
  afterwards.

  The login for your dedicated VM is `root:password`.

# The flag
flag: CTF{D1ff3r3nT_D3v1Ce_S4mE_Tr1Ckz}

# Task category. (one of hardware, crypto, pwn, reversing, web, net, misc)
category: hardware

# === the fields below will be filled by SRE or automation ===

# Task label
label: ''

# URL for web challenges
link: ''

# host/port for non-web challenges
host: 'pcivault.2021.ctfcompetition.com 1337'

# the URL for attachments, to be filled in by automation
attachment: ''

# is this challenge released? Will be set by SREs
visible: false
<filename>hw/vendor/openhwgroup_cva6/Bender.yml
package:
  name: cva6
  authors:
    - "<NAME> <<EMAIL>>"
    - "<NAME> <<EMAIL>>"

dependencies:
  axi: {path: ../../vendor/pulp_platform_axi}
  common_cells: {path: ../../vendor/pulp_platform_common_cells}
  fpnew: {path: ../../vendor/pulp_platform_fpnew}
  reqrsp_interface: {path: ../../ip/reqrsp_interface}
  riscv-dbg: {path: ../../vendor/pulp_platform_riscv_dbg}

sources:
  - defines:
    files:
      # Packages
      - include/riscv_pkg.sv
      - include/ariane_pkg.sv
      - include/std_cache_pkg.sv
      - include/wt_cache_pkg.sv
      - include/ariane_axi_pkg.sv
      # Stand-alone source files
      - src/ariane.sv
      - src/serdiv.sv
      - src/ariane_regfile_ff.sv
      - src/amo_buffer.sv
      - src/id_stage.sv
      - src/branch_unit.sv
      - src/instr_realign.sv
      - src/load_store_unit.sv
      - src/controller.sv
      - src/issue_stage.sv
      - src/re_name.sv
      - src/csr_buffer.sv
      - src/tlb.sv
      - src/decoder.sv
      - src/scoreboard.sv
      - src/perf_counters.sv
      - src/store_unit.sv
      - src/axi_adapter.sv
      - src/fpu_wrap.sv
      - src/csr_regfile.sv
      - src/commit_stage.sv
      - src/alu.sv
      - src/multiplier.sv
      - src/store_buffer.sv
      - src/compressed_decoder.sv
      - src/axi_shim.sv
      - src/ex_stage.sv
      - src/mmu.sv
      - src/ptw.sv
      - src/mult.sv
      - src/load_unit.sv
      - src/issue_read_operands.sv
      - src/pmp/src/pmp_entry.sv
      - src/pmp/src/pmp.sv
      - src/frontend/frontend.sv
      - src/frontend/instr_scan.sv
      - src/frontend/instr_queue.sv
      - src/frontend/bht.sv
      - src/frontend/btb.sv
      - src/frontend/ras.sv
      - src/cache_subsystem/tag_cmp.sv
      - src/cache_subsystem/cache_ctrl.sv
      - src/cache_subsystem/amo_alu.sv
      - src/cache_subsystem/wt_axi_adapter.sv
      - src/cache_subsystem/wt_dcache_ctrl.sv
      - src/cache_subsystem/wt_cache_subsystem.sv
      - src/cache_subsystem/wt_dcache_missunit.sv
      - src/cache_subsystem/cva6_icache.sv
      - src/cache_subsystem/wt_dcache_wbuffer.sv
      - src/cache_subsystem/wt_l15_adapter.sv
      - src/cache_subsystem/wt_dcache_mem.sv
      - src/cache_subsystem/miss_handler.sv
      - src/cache_subsystem/std_nbdcache.sv
      - src/cache_subsystem/cva6_icache_axi_wrapper.sv
      - src/cache_subsystem/std_cache_subsystem.sv
      - src/cache_subsystem/wt_dcache.sv
  - include_dirs:
      - src/util
    target: not(synthesis)
    files:
      - include/instr_tracer_pkg.sv
      - src/util/instr_tracer.sv
      - src/util/instr_tracer_if.sv
<gh_stars>0 language: java jdk: - oraclejdk7 - openjdk7 before_script: ./gradlew installApp script: ./parsertests.py
<filename>.github/workflows/sv-tests-ci.yml<gh_stars>0
name: sv-tests-ci
on:
  push:
    branches: [master]
  pull_request:

jobs:
  Test:
    name: "Code Quality Checks"
    runs-on: ubuntu-18.04
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          submodules: true
      - uses: actions/setup-python@v2
        with:
          python-version: 3.7
      - name: Install requirements
        run: pip install -r conf/requirements.txt
      - name: Run formatter
        run: make format
      - name: Check that formatting left no diff
        run: test $(git status --porcelain | wc -l) -eq 0 || { git diff; false; }
      - name: Check license headers
        run: ./.github/workflows/check_license.sh
  Run:
    strategy:
      fail-fast: false
      matrix:
        env:
          - { JOB_NAME: iverilog, MAKEFLAGS: -j2 }
          - { JOB_NAME: moore, MAKEFLAGS: -j2 }
          - { JOB_NAME: odin_ii, MAKEFLAGS: -j2 }
          - { JOB_NAME: slang, MAKEFLAGS: -j2 }
          - { JOB_NAME: sv-parser, MAKEFLAGS: -j2 }
          - { JOB_NAME: surelog, MAKEFLAGS: -j2 }
          - { JOB_NAME: tree-sitter-verilog, MAKEFLAGS: -j2 }
          - { JOB_NAME: yosys, MAKEFLAGS: -j2 }
          - { JOB_NAME: antmicro-yosys, MAKEFLAGS: -j2 }
          - { JOB_NAME: verible, MAKEFLAGS: -j2 }
          - { JOB_NAME: verilator, MAKEFLAGS: -j2 }
          - { JOB_NAME: uhdm-integration-verilator, MAKEFLAGS: -j2 }
          - { JOB_NAME: uhdm-integration-yosys, MAKEFLAGS: -j2 }
          - { JOB_NAME: zachjs-sv2v, MAKEFLAGS: -j2 }
    name: ${{ matrix.env.JOB_NAME }}
    env: ${{ matrix.env }}
    runs-on: ubuntu-18.04
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: 3.7
      - name: Install
        run: ./.github/workflows/install.sh
      - name: Run
        run: ./.github/workflows/run.sh
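# NOTE (hypothetical sketch): the Run job above dispatches on JOB_NAME, with
# install.sh and run.sh selecting the tool to build and execute, so a new tool
# would be one more matrix entry. The name below is illustrative and would
# also need handling in those two scripts:
#
#   - { JOB_NAME: my-new-tool, MAKEFLAGS: -j2 }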
<gh_stars>1-10 # Yaml configuration file for verible-verilog-format. # Generated from `verible-verilog-format --helpfull` output and # then formatted into a yaml file for convenience. # assignment_statement_alignment - Format various assignments: {align,flush-left,preserve,infer}; # default: infer; assignment_statement_alignment: align # case_items_alignment - Format case items: {align,flush-left,preserve,infer}; default: infer; case_items_alignment: align # class_member_variables_alignment - Format class member variables: # {align,flush-left,preserve,infer}; default: infer; class_member_variables_alignment: align # failsafe_success - If true, always exit with 0 status, even if there were input errors or # internal errors. In all error conditions, the original text is always preserved. This is # useful in deploying services where fail-safe behaviors should be considered a success.; # default: true; failsafe_success: true # formal_parameters_alignment - Format formal parameters: {align,flush-left,preserve,infer}; # default: infer; formal_parameters_alignment: align # formal_parameters_indentation - Indent formal parameters: {indent,wrap}; default: wrap; formal_parameters_indentation: wrap # max_search_states - Limits the number of search states explored during line wrap # optimization.; default: 100000; max_search_states: 100000 # named_parameter_alignment - Format named actual parameters: {align,flush-left,preserve,infer}; # default: infer; named_parameter_alignment: align # named_parameter_indentation - Indent named parameter assignments: {indent,wrap}; # default: wrap; named_parameter_indentation: wrap # named_port_alignment - Format named port connections: {align,flush-left,preserve,infer}; # default: infer; named_port_alignment: align # named_port_indentation - Indent named port connections: {indent,wrap}; default: wrap; named_port_indentation: wrap # net_variable_alignment - Format net/variable declarations: {align,flush-left,preserve,infer}; # default: infer; net_variable_alignment: align # port_declarations_alignment - Format port declarations: {align,flush-left,preserve,infer}; # default: infer; port_declarations_alignment: align # port_declarations_indentation - Indent port declarations: {indent,wrap}; default: wrap; port_declarations_indentation: wrap # show_equally_optimal_wrappings - If true, print when multiple optimal solutions are found # (stderr), but continue to operate normally.; default: false; show_equally_optimal_wrappings: false # show_inter_token_info - If true, along with show_token_partition_tree, include inter-token # information such as spacing and break penalties.; default: false; show_inter_token_info: false # show_largest_token_partitions - If > 0, print token partitioning and then exit without # formatting output.; default: 0; show_largest_token_partitions: 0 # show_token_partition_tree - If true, print diagnostics after token partitioning and then # exit without formatting output.; default: false; show_token_partition_tree: false # try_wrap_long_lines - If true, let the formatter attempt to optimize line wrapping decisions # where wrapping is needed, else leave them unformatted. This is a short-term measure to reduce # risk-of-harm.; default: false; try_wrap_long_lines: false # verify_convergence - If true, and not incrementally formatting with --lines, verify that # re-formatting the formatted output yields no further changes, i.e. formatting is convergent.; # default: true; verify_convergence: true
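# NOTE (hypothetical sketch): each key above mirrors a verible-verilog-format
# command-line flag (the header says the file was generated from --helpfull
# output), so a few of these settings applied directly would look like this;
# the file name is illustrative:
#
#   verible-verilog-format \
#     --assignment_statement_alignment=align \
#     --named_port_alignment=align \
#     my_module.sv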
<reponame>slaclab/Simple-ZCU216-Example<gh_stars>0 Root: XilinxZcu216: Application: AppRingBuffer: Adc: Ch[:]: TrigBurst: 0x0 ContinuousMode: 0x1 Dac: Ch[:]: TrigBurst: 0x0 ContinuousMode: 0x1 DacSigGen: enable: True Enabled: 0x1 Continuous: 0x1 # CsvFilePath: 'config/DacSigGen/1tone_10_5MHz.csv' # CsvFilePath: 'config/DacSigGen/1tone_500MHz.csv' # CsvFilePath: 'config/DacSigGen/2tone_500MHz_500kHzspacing.csv' CsvFilePath: 'config/DacSigGen/Ntone_1GHz_blanksection.csv'
name: earlgrey
description: Earlgrey design from OpenTitan
top_module: top_earlgrey_nexysvideo
tags: earlgrey uvm
path: third_party/cores/opentitan
command: fusesoc --cores-root third_party/cores/opentitan run --target=sim lowrisc:systems:top_earlgrey:0.1
conf_file: build/lowrisc_systems_top_earlgrey_0.1/sim-icarus/core-deps.mk
test_file: earlgrey.sv
timeout: 360
- status: 0 - openlane design prep
  runtime_s: 1.581
  runtime_ts: 0h0m1s581ms
- status: 1 - synthesis - yosys
  runtime_s: 18.689
  runtime_ts: 0h0m18s689ms
- status: 2 - sta - openroad
  runtime_s: 2.004
  runtime_ts: 0h0m2s4ms
- status: 3 - floorplan initialization - openroad
  runtime_s: 0.64
  runtime_ts: 0h0m0s640ms
- status: 4 - ioplace - openroad
  runtime_s: 0.32
  runtime_ts: 0h0m0s320ms
- status: 5 - tap/decap insertion - openroad
  runtime_s: 0.359
  runtime_ts: 0h0m0s359ms
- status: 6 - pdn generation - openroad
  runtime_s: 2.048
  runtime_ts: 0h0m2s48ms
- status: 7 - global placement - openroad
  runtime_s: 7.761
  runtime_ts: 0h0m7s761ms
- status: 8 - resizer design optimizations - openroad
  runtime_s: 7.128
  runtime_ts: 0h0m7s128ms
- status: 9 - write verilog - openroad
  runtime_s: 0.319
  runtime_ts: 0h0m0s319ms
- status: 10 - detailed placement - openroad
  runtime_s: 1.863
  runtime_ts: 0h0m1s863ms
- status: 11 - cts
  runtime_s: 43.656
  runtime_ts: 0h0m43s655ms
- status: 12 - write verilog - openroad
  runtime_s: 0.356
  runtime_ts: 0h0m0s356ms
- status: 13 - resizer timing optimizations - openroad
  runtime_s: 4.947
  runtime_ts: 0h0m4s947ms
- status: 14 - write verilog - openroad
  runtime_s: 0.353
  runtime_ts: 0h0m0s353ms
- status: 15 - detailed placement - openroad
  runtime_s: 0.596
  runtime_ts: 0h0m0s596ms
- status: 16 - global routing - openroad
  runtime_s: 5.758
  runtime_ts: 0h0m5s758ms
- status: 17 - fill insertion - openroad
  runtime_s: 0.498
  runtime_ts: 0h0m0s498ms
- status: 18 - write verilog - openroad
  runtime_s: 0.437
  runtime_ts: 0h0m0s437ms
- status: 19 - detailed_routing - openroad
  runtime_s: 157.825
  runtime_ts: 0h2m37s824ms
- status: 20 - write verilog - openroad
  runtime_s: 0.765
  runtime_ts: 0h0m0s765ms
- status: 21 - parasitics extraction - openroad
  runtime_s: 3.243
  runtime_ts: 0h0m3s242ms
- status: 22 - parasitics extraction - openroad
  runtime_s: 3.298
  runtime_ts: 0h0m3s298ms
- status: 23 - parasitics extraction - openroad
  runtime_s: 3.796
  runtime_ts: 0h0m3s796ms
- status: 24 - sta - openroad
  runtime_s: 4.684
  runtime_ts: 0h0m4s684ms
- status: 25 - sta - openroad
  runtime_s: 18.14
  runtime_ts: 0h0m18s140ms
- status: 26 - gdsii - magic
  runtime_s: 12.885
  runtime_ts: 0h0m12s884ms
- status: 27 - gdsii - klayout
  runtime_s: 2.078
  runtime_ts: 0h0m2s77ms
- status: 28 - spice extraction - magic
  runtime_s: 12.902
  runtime_ts: 0h0m12s901ms
- status: 29 - write verilog - openroad
  runtime_s: 0.748
  runtime_ts: 0h0m0s748ms
- status: 30 - write powered verilog - openlane
  runtime_s: 0.89
  runtime_ts: 0h0m0s890ms
- status: 31 - lvs - netgen
  runtime_s: 3.097
  runtime_ts: 0h0m3s96ms
- status: 32 - drc - magic
  runtime_s: 71.137
  runtime_ts: 0h1m11s137ms
- status: 33 - antenna check - openroad
  runtime_s: 1.468
  runtime_ts: 0h0m1s468ms
- status: 34 - erc - cvc
  runtime_s: 0.82
  runtime_ts: 0h0m0s820ms
---
- status: routed
  runtime_s: 295.0
  runtime_ts: 0h4m55s0ms
- status: flow completed
  runtime_s: 403.0
  runtime_ts: 0h6m43s0ms
<reponame>atmughrabi/CAPIPrecis<filename>.travis.yml sudo: required dist: xenial before_install: - sudo apt-get update -qq env: OMP_NUM_THREADS=4 language: c compiler: - gcc script: - make -j - make test notifications: email: on_success: never on_failure: always recipients: - <EMAIL> - <EMAIL> branches: only: - master
<filename>src/main/docker/activemq.yml version: '2' services: activemq-broker: image: vromero/activemq-artemis:latest environment: - ARTEMIS_USERNAME=guest - ARTEMIS_PASSWORD=<PASSWORD> ports: - 61613:61613
<reponame>ameetgohil/SignalFlip-js
#=========================================================================
# Travis CI Configuration
#=========================================================================
# This file configures Travis CI to automatically run tests for this RTL
# project every time a commit is made.

# Prefer use of container-based environment as they start up faster and
# provide more RAM and CPU cores.
sudo: false

node_js:
  - 16

# Install the build requirements
addons:
  apt:
    sources:
      # For gcc-4.8
      - ubuntu-toolchain-r-test
    packages:
      # Dependencies from tutorial
      - autoconf
      - automake
      - autotools-dev
      - curl
      - libmpc-dev
      - libmpfr-dev
      - libgmp-dev
      - gawk
      - build-essential
      - bison
      - flex
      - texinfo
      - gperf
      - libncurses5-dev
      - libusb-1.0-0
      - libboost-dev
      # C/C++ compilers
      - gcc
      - g++
      - gtkwave

# No Git operations will be performed. Best to fetch only the latest commit.
git:
  depth: 1

# TODO: Verify that this setting is optimal.
env:
  - JOBS=2

# Don't need email notifications for now.
notifications:
  email: false

install:
  # Build Verilator.
  - git clone http://git.veripool.org/git/verilator
  - cd verilator
  - autoconf && ./configure && make
  - export VERILATOR_ROOT=${PWD}
  - export PATH=${VERILATOR_ROOT}/bin:$PATH
  - verilator -V
  - cd ..

script:
  # Run the simulation.
  - nvm use || nvm install
  - npm i
  - make
  - make VERILATOR_USER_ARGS=-DEB0
  - git clone https://github.com/ameetgohil/elastic-signalflip-example.git elastic && rm -rf elastic/.git
  - cd elastic
  - nvm use || nvm install
  - npm i
  - make
  - cd ..
  - git clone https://github.com/ameetgohil/basic-signalflip-example.git basic && rm -rf basic/.git
  - cd basic
  - nvm use || nvm install
  - npm i
  - make
  - cd ..
  - git clone https://github.com/ameetgohil/APB3-config-regs.git
  - cd APB3-config-regs/sim
  - nvm use || nvm install
  - npm i
  - make
  - cd ../..
  - git clone https://github.com/ameetgohil/leading-zeroes-counter.git
  - cd leading-zeroes-counter/sim
  - nvm use || nvm install
  - npm i
  - make
  - cd ../..
  - git clone --recursive https://github.com/ameetgohil/reciprocal-sv.git
  - cd reciprocal-sv/sim
  - nvm use || nvm install
  - npm i
  - make

branches:
  only:
    - master
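# NOTE: Travis runs every entry under script: even after one fails, and marks
# the job failed if any entry exited non-zero, so the bare `make` calls above
# are enough to surface per-repo failures. A hypothetical pattern for
# aggregating a status by hand instead:
#
#   - res=0
#   - make -C elastic || res=1
#   - make -C basic || res=1
#   - (exit $res)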
parameter_list: - &channel_width 1 - &code_precision 8 - &output_precision 8 - &ffe_length 3 - &ffe_weight_precision 8 - &estimate_depth 16 - &estimate_precision 8 - &sequence_length 5 - &decision_shift_precision 4 - &ffe_shift_precision 5 - &mlsd_bit_length 1 - &mlsd_est_cursor_pos 0 generic: parameters: channel_width: *channel_width code_precision : *code_precision ffe_length: *ffe_length ffe_weight_precision: *ffe_weight_precision ffe_shift: 8 mlsd_shift: 8 ffe: parameters: length : *ffe_length width : *channel_width input_precision : *code_precision output_precision: *output_precision weight_precision: *ffe_weight_precision shift_precision: *ffe_shift_precision adaptation: type: 'wiener' args: { mu : 0.1 } comp: parameters: width : *channel_width input_precision : *output_precision conf_precision : 8 thresh_precision : *output_precision threshold: value: 0 mlsd: parameters: width: *channel_width length: *sequence_length code_precision : *code_precision estimate_precision: *estimate_precision estimate_depth: *estimate_depth shift_precision: *decision_shift_precision bit_length: *mlsd_bit_length est_center: *mlsd_est_cursor_pos
top: cover_percentage: 81.25 coverage: 26 size: 32 type: <class 'cocotb_coverage.coverage.CoverItem'> top.fifo_empty: at_least: 1 bins:_hits: false: 74 true: 26 cover_percentage: 100.0 coverage: 2 size: 2 type: <class 'cocotb_coverage.coverage.CoverPoint'> weight: 1 top.fifo_full: at_least: 1 bins:_hits: false: 100 true: 0 cover_percentage: 50.0 coverage: 1 size: 2 type: <class 'cocotb_coverage.coverage.CoverPoint'> weight: 1 top.fifo_overflow: at_least: 1 bins:_hits: false: 100 true: 0 cover_percentage: 50.0 coverage: 1 size: 2 type: <class 'cocotb_coverage.coverage.CoverPoint'> weight: 1 top.fifo_threshold: at_least: 1 bins:_hits: false: 66 true: 34 cover_percentage: 100.0 coverage: 2 size: 2 type: <class 'cocotb_coverage.coverage.CoverPoint'> weight: 1 top.fifo_underflow: at_least: 1 bins:_hits: false: 84 true: 16 cover_percentage: 100.0 coverage: 2 size: 2 type: <class 'cocotb_coverage.coverage.CoverPoint'> weight: 1 top.rw: at_least: 1 bins:_hits: false: 47 true: 53 cover_percentage: 100.0 coverage: 2 size: 2 type: <class 'cocotb_coverage.coverage.CoverPoint'> weight: 1 top.rwXempty: at_least: 1 bins:_hits: (False, False): 37 (False, True): 10 (True, False): 37 (True, True): 16 cover_percentage: 100.0 coverage: 4 size: 4 type: <class 'cocotb_coverage.coverage.CoverCross'> weight: 1 top.rwXfull: at_least: 1 bins:_hits: (False, False): 47 (False, True): 0 (True, False): 53 (True, True): 0 cover_percentage: 50.0 coverage: 2 size: 4 type: <class 'cocotb_coverage.coverage.CoverCross'> weight: 1 top.rwXoverflow: at_least: 1 bins:_hits: (False, False): 47 (False, True): 0 (True, False): 53 (True, True): 0 cover_percentage: 50.0 coverage: 2 size: 4 type: <class 'cocotb_coverage.coverage.CoverCross'> weight: 1 top.rwXthreshold: at_least: 1 bins:_hits: (False, False): 29 (False, True): 18 (True, False): 37 (True, True): 16 cover_percentage: 100.0 coverage: 4 size: 4 type: <class 'cocotb_coverage.coverage.CoverCross'> weight: 1 top.rwXunderflow: at_least: 1 bins:_hits: (False, False): 39 (False, True): 8 (True, False): 45 (True, True): 8 cover_percentage: 100.0 coverage: 4 size: 4 type: <class 'cocotb_coverage.coverage.CoverCross'> weight: 1
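# NOTE: in this cocotb-coverage dump, cover_percentage = coverage / size * 100;
# e.g. for top, 26/32 = 81.25%, and for a half-covered point such as
# top.fifo_full, 1/2 = 50%.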
<filename>.github/workflows/build.yml
on:
  workflow_dispatch:
  schedule:
    - cron: '0 0 * * *'

name: 'Nightly ZeroSoC Build'

jobs:
  zerosoc_build_job:
    timeout-minutes: 60
    runs-on: self-hosted
    steps:
      - uses: actions/checkout@v2
        with:
          submodules: recursive
      - run: |
          python3 -m venv --clear $GITHUB_WORKSPACE/clean_env
          source $GITHUB_WORKSPACE/clean_env/bin/activate
          git clone ssh://[email protected]/siliconcompiler/siliconcompiler.git
          cd siliconcompiler
          git submodule update --init --recursive third_party/tools/openroad
          git rev-parse HEAD
          pip install -e .
          pip install pytest
          cd ..
      - run: |
          source $GITHUB_WORKSPACE/clean_env/bin/activate
          pytest build.py
        env:
          SCPATH: ${{ github.workspace }}/siliconcompiler/siliconcompiler
oscope_top_test: before_script: - cd projects/oscope/bmb7_cu stage: test script: - make Voscope_top_tb && make Voscope_top_leep && make clean oscope_top_bmb7: before_script: - cd projects/oscope/bmb7_cu && ls /non-free stage: synthesis script: - verilator -V && XILINX_VIVADO=$XILINX_VIVADO PATH=$XILINX_VIVADO/bin:$PATH make oscope_top.bit artifacts: name: "$CI_JOB_NAME-$CI_COMMIT_REF_NAME" expire_in: 1 week paths: - projects/oscope/bmb7_cu/oscope_top.bit oscope_top_marble: before_script: - cd projects/oscope/marble_family && ls /non-free && apt-get update && apt-get install -y yosys stage: synthesis script: - verilator -V && XILINX_VIVADO=$XILINX_VIVADO PATH=$XILINX_VIVADO/bin:$PATH make oscope_top.bit artifacts: name: "$CI_JOB_NAME-$CI_COMMIT_REF_NAME" expire_in: 1 week paths: - projects/oscope/marble_family/oscope_top.bit # oscope_program: # before_script: # - cd projects/oscope/software/bmb7 && wget -r -nH http://sliderule.dhcp.lbl.gov:8000/{r1,configuration} && cd .. # stage: program # dependencies: # - oscope_top_bmb7 # script: # - export PYTHONPATH=$PYTHONPATH:../../../build-tools/:../../../dsp && python prc.py -a 192.168.1.121 -r -b ../bmb7_cu/oscope_top.bit
name: vitis-ai-optimizer_tensorflow2 channels: - conda-forge dependencies: - python=3.8 - vai_optimizer_tensorflow2_gpu
name: pytest on: [push] jobs: build: runs-on: ubuntu-18.04 steps: - uses: actions/checkout@v2 - name: Set up Python 3.x uses: actions/setup-python@v2 with: python-version: '3.x' - name: Install dependencies run: | python -m pip install --upgrade pip pip install pytest pytest-xdist pip install pyyaml jinja2 wavedrom sudo dpkg --add-architecture i386 sudo apt update sudo apt install -y libxtst6:i386 libxft2:i386 wget https://download.altera.com/akdlm/software/acdsinst/20.1std/711/ib_installers/ModelSimSetup-20.1.0.711-linux.run chmod +x ModelSimSetup-20.1.0.711-linux.run ./ModelSimSetup-20.1.0.711-linux.run --mode unattended --accept_eula 1 --installdir $HOME/ModelSim-20.1.0 --unattendedmodeui none echo "$HOME/ModelSim-20.1.0/modelsim_ase/bin" >> $GITHUB_PATH - name: Test package run: | pytest -v -n auto
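# NOTE (hypothetical sketch, not part of the workflow above): the ModelSim
# installer is re-downloaded on every run; a cache step placed before the
# install, with the wget wrapped in a file-existence guard, could avoid that:
#
#   - name: Cache ModelSim installer
#     uses: actions/cache@v2
#     with:
#       path: ModelSimSetup-20.1.0.711-linux.run
#       key: modelsim-20.1.0.711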
<gh_stars>100-1000 name: fomu_ci on: [push, pull_request] jobs: fomu-build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-python@v2 with: python-version: '3.7' - run: bash scripts/setup -ci - run: which pip3 && which python3 && which pip - run: make env - run: source env/conda/bin/activate cfu-common && which pip3 && which python3 && which pip - run: source env/conda/bin/activate cfu-common && riscv32-elf-gcc --version - run: pwd && source env/conda/bin/activate cfu-common && source environment && yosys --version && nextpnr-ice40 --version - run: pwd && source env/conda/bin/activate cfu-common && source environment && cd proj/proj_template_v && pip3 list && make TARGET=kosagi_fomu bitstream - run: pwd && source env/conda/bin/activate cfu-common && source environment && cd proj/kws_micro_accel && pip3 list && make TARGET=kosagi_fomu bitstream || true
<filename>models/AI-Model-Zoo/model-list/cf_multitask_bdd_288_512_14.8G_2.0/model.yaml # Copyright 2019 Xilinx Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. description: detection&segmentation on bdd dataset. input size: 288*512 float ops: 14.8G task: detection&segmentation framework: caffe prune: 'no' version: 2.0 files: - name: cf_multitask_bdd_288_512_14.8G_2.0 type: float & quantized board: GPU download link: https://www.xilinx.com/bin/public/openDownload?filename=cf_multitask_bdd_288_512_14.8G_2.0.zip checksum: 11dfd51e80a8effc946b911fe905fcc8 - name: multi_task type: xmodel board: zcu102 & zcu104 & kv260 download link: https://www.xilinx.com/bin/public/openDownload?filename=multi_task-zcu102_zcu104_kv260-r2.0.0.tar.gz checksum: b7cd43eacd3b460350ed33cf6e79d70d - name: multi_task type: xmodel board: vck190 download link: https://www.xilinx.com/bin/public/openDownload?filename=multi_task-vck190-r2.0.0.tar.gz checksum: 2bc05520928878148bd377ff0084a764 - name: multi_task type: xmodel board: vck50006pe-DPUCVDX8H-DWC download link: https://www.xilinx.com/bin/public/openDownload?filename=multi_task-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz checksum: eac91b74e99ec7b4c66484a7dd27b99c - name: multi_task type: xmodel board: vck50008pe-DPUCVDX8H download link: https://www.xilinx.com/bin/public/openDownload?filename=multi_task-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz checksum: fa0ee69b793039e1aed3af842ff55bac - name: multi_task type: xmodel board: u50lv-DPUCAHX8H download link: https://www.xilinx.com/bin/public/openDownload?filename=multi_task-u50lv-DPUCAHX8H-r2.0.0.tar.gz checksum: ba1b9042afc7bfd70825df92da9dac7d - name: multi_task type: xmodel board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC download link: https://www.xilinx.com/bin/public/openDownload?filename=multi_task-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz checksum: df2a693ba87282a22ab49eaa7cda85d0 license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
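# NOTE (hypothetical sketch): the 32-hex-digit checksums above appear to be
# MD5 sums, so a downloaded archive would plausibly be verified with:
#
#   md5sum cf_multitask_bdd_288_512_14.8G_2.0.zip
#   # expected: 11dfd51e80a8effc946b911fe905fcc8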
<reponame>nicolasruscher/Artemis
blank_issues_enabled: false
contact_links:
  - name: ❓ Support Question
    url: https://artemis.ase.in.tum.de/mailto
    about: Please do not file questions or support requests on the GitHub issue tracker. Instead, contact the Artemis team by email at <EMAIL>.
stages: - Selftest variables: GIT_SUBMODULE_STRATEGY: recursive pyIPCMI-Selftest: stage: Selftest before_script: - pip3 install -r tools/GitLab-CI/requirements.txt # - ./tools/GitLab-CI/ghdl.setup.sh - ./tools/GitLab-CI/poc.setup.sh script: # - ./tools/GitLab-CI/poc.dryrun.sh - ./tools/GitLab-CI/poc.run.sh "PoC.*"
# Copyright lowRISC contributors. # Licensed under the Apache License, Version 2.0, see LICENSE for details. # SPDX-License-Identifier: Apache-2.0 # The default configuration, which picks at random from various other # configurations inherit: - base - cfgs: loopy weight: 0.1
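# NOTE (hypothetical sketch): following the schema above, another
# randomly-pickable configuration would presumably be added as one more
# weighted entry; the name and weight below are illustrative:
#
# inherit:
#   - base
#   - cfgs: loopy
#     weight: 0.1
#   - cfgs: straight
#     weight: 0.2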
name: sw-ci on: schedule: - cron: '0 0 * * *' pull_request: branches: [develop] jobs: sw-release: runs-on: [self-hosted, Linux, ARM, Pi] steps: - name: Git Checkout uses: actions/checkout@v2 - name: Configure CMake run: cmake -GNinja -Bbuild - name: Build libDigitizer.so run: cmake --build build --target libDigitizer.so - name: Build test run: cmake --build build --target register - name: Upload libDigitizer Artifacts uses: actions/upload-artifact@v2 with: name: libDigitizer path: | build/sw/lib/libDigitizer.so build/sw/test/register
dist: bionic addons: apt: packages: - libzmq3-dev language: cpp compiler: gcc before_install: - sudo apt update - sudo apt -y install libpugixml-dev - sudo apt -y install libboost-all-dev before_script: - mkdir systemc - wget https://accellera.org/images/downloads/standards/systemc/systemc-2.3.3.tar.gz - tar -xf systemc-2.3.3.tar.gz - cd systemc-2.3.3 - mkdir objdir - cd objdir - ../configure --prefix=/usr/local/systemc-2.3.3 - make -j - sudo mkdir /usr/local/systemc-2.3.3 - sudo make install - sudo sed -i '/using std::gets;/d' /usr/local/systemc-2.3.3/include/systemc.h - cd ../.. - cd simulator - mkdir build - cd build - cmake .. script: - make #- ./sim
<gh_stars>0 sim.inputs.top_module: "hdc_sensor_fusion" sim.inputs.tb_dut: "dut" sim.inputs.tb_name: "hdc_sensor_fusion_tb" sim.inputs.input_files_meta: "append" sim.inputs.input_files: - "src/HDC_Sensor_Fusion_9M/hdc_sensor_fusion.sv" - "src/HDC_Sensor_Fusion_9M/memory_wrapper_gsr.sv" - "src/HDC_Sensor_Fusion_9M/memory_wrapper_ecg.sv" - "src/HDC_Sensor_Fusion_9M/memory_wrapper_eeg.sv" - "src/HDC_Sensor_Fusion_9M/hdc_sensor_fusion_tb.sv" - "src/HDC_Sensor_Fusion_9M/associative_memory.sv" - "src/HDC_Sensor_Fusion_9M/hv2000_binary_adder.sv" - "src/HDC_Sensor_Fusion_9M/fuser.v" - "src/HDC_Sensor_Fusion_9M/memory_controller.sv" - "src/HDC_Sensor_Fusion_9M/spatial_encoder.v" - "src/HDC_Sensor_Fusion_9M/temporal_encoder.v" sim.inputs: timescale: "1ps/1ps" options: - "-notice" - "-line" - "-debug_pp" - "-debug_all" - "+v2k" - "+lint=all,noVCDE" - "+incdir+../../src/HDC_Sensor_Fusion_9M" - "+define+CLOCK_PERIOD=9009" - "-sverilog" execute_sim: true execution_flags: ["+verbose=1"]
<filename>.github/workflows/docs.yml name: 'doc' on: push: jobs: linux: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - uses: buildthedocs/btd@v0 with: token: ${{ github.token }}
os: linux dist: xenial language: c compiler: - gcc addons: apt: sources: - ubuntu-toolchain-r-test - llvm-toolchain-precise packages: - gcc-5 - g++-5 before_install: - sudo apt-get -qq update # - sudo dpkg --add-architecture i386 # - sudo apt-get update # - sudo apt-get install libc6:i386 libstdc++6:i386 # - sudo apt-get install gcc-multilib libasound2-dev:i386 libgsm1-dev:i386 libjpeg8-dev:i386 liblcms2-dev:i386 libldap2-dev:i386 libmpg123-dev:i386 libopenal-dev:i386 libv4l-dev:i386 libx11-dev:i386 libxinerama-dev:i386 libxml2-dev:i386 zlib1g-dev:i386 # - sudo apt-get install libcapi20-dev:i386 libcups2:i386 libdbus-1-3:i386 libfontconfig:i386 libfreetype6:i386 libglu1-mesa:i386 libgnutls26:i386 libncurses5:i386 libosmesa6:i386 libsane:i386 libxcomposite1:i386 libxcursor1:i386 libxi6:i386 libxrandr2:i386 libxslt1.1:i386 ocl-icd-libopencl1:i386 install: - if [ "$CXX" = "g++" ]; then export CXX="g++-5" CC="gcc-5"; fi script: - cd .. - git clone https://github.com/wine-staging/wine-staging.git - cd wine-staging/staging - ./patchinstall.py -d ../../wine -a - cd ../../wine - ./configure --enable-win64 --prefix=/home/travis/wine/build - make -j$(nproc) after_success: - wget -c https://github.com/probonopd/uploadtool/raw/master/upload.sh - bash upload.sh build/*
language: python install: - pip install -r requirements.txt - pip install . script: coverage run -m unittest after_success: codecov
<reponame>rodrigomelo9/zynq-examples<gh_stars>1-10 name: 'hdl' on: push: jobs: examples: name: 'Running examples' runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Pulling a container image with FLOSS synthesizers run: docker pull hdlc/ghdl:yosys - name: Running synthesis for VHDL files run: make -C hdl vhdl - name: Running synthesis for Verilog files run: make -C hdl vlog
# Copyright 2022 ETH Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

package:
  name: clic
  authors:
    - "<NAME> <<EMAIL>>"
    - "<NAME> <<EMAIL>>"

dependencies:
  common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.21.0 }
  register_interface: { git: "https://github.com/pulp-platform/register_interface.git", version: 0.3.1 }

sources:
  # - src/gen/clic_reg_pkg.sv
  # - src/gen/clic_reg_top.sv
  # - src/gen/clic_reg_adapter.sv
  - src/clic_reg_pkg.sv
  - src/clic_reg_top.sv
  - src/clic_reg_adapter.sv
  - src/clic_gateway.sv
  - src/clic_target.sv
  - src/clic.sv
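# NOTE (hypothetical sketch): a further dependency would follow the same
# Bender schema as above; the version below is illustrative:
#
#   apb: { git: "https://github.com/pulp-platform/apb.git", version: 0.2.0 }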
<gh_stars>1-10 language: cpp dist: bionic os: linux compiler: gcc addons: apt: sources: - ubuntu-toolchain-r-test packages: - build-essential - valgrind - verilator - yosys install: # TOOLCHAIN - ci/toolchain_install.sh -all - export RISCV_TOOLCHAIN_PATH=/opt/riscv-gnu-toolchain - export VERILATOR_ROOT=/opt/verilator - export PATH=$VERILATOR_ROOT/bin:$PATH # VORTEX - git clone --recursive https://github.com/vortexgpgpu/vortex.git - cd vortex - make -j`nproc` script: - ci/test_runtime.sh - ci/test_driver.sh - ci/test_riscv_isa.sh - ci/test_opencl.sh after_success: # Gather code coverage - lcov --directory . --capture --output-file coverage.info # capture trace - lcov --list coverage.info # trace report # Upload coverage report - bash <(curl -s https://codecov.io/bash)
<reponame>ahadnagy/fletcher-alveo<filename>.github/workflows/book.yml name: Book on: push: branches: - master pull_request: paths: - 'book/**' jobs: build: name: Build runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Install mdbook run: | mkdir -p $HOME/mdbook curl -L https://github.com/rust-lang/mdBook/releases/download/v0.4.5/mdbook-v0.4.5-x86_64-unknown-linux-gnu.tar.gz | tar xz -C $HOME/mdbook echo "${HOME}/mdbook/" >> $GITHUB_PATH - name: Build run: mdbook build working-directory: book - uses: actions/upload-artifact@v2 with: name: book path: book/book deploy: name: Deploy runs-on: ubuntu-latest needs: [build] if: github.event_name == 'push' && github.ref == 'refs/heads/master' steps: - uses: actions/download-artifact@v2 with: name: book - uses: peaceiris/actions-gh-pages@v3 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: .
<reponame>sarnold/chiptools<filename>.pre-commit-config.yaml<gh_stars>0 # To install the git pre-commit hook run: # pre-commit install # To update the pre-commit hooks run: # pre-commit install-hooks repos: - repo: meta hooks: - id: check-useless-excludes - id: check-hooks-apply - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.0.1 hooks: - id: trailing-whitespace - id: end-of-file-fixer exclude: '(.*tests/.*|.*testing/.*|.*test.py$)' - id: mixed-line-ending args: [--fix=lf] - id: check-toml #- id: check-json - id: check-yaml exclude: '(conda/meta.yaml|.pep8speaks.yml)' # use ffffff (black fork) for single quote normalization # (otherwise switch to black for double quotes) - repo: https://github.com/grktsh/ffffff rev: v2020.8.31 hooks: - id: ffffff name: "Format code (ffffff)" files: ^chiptools.*\.py$ exclude: '(.*testing/.*)' language_version: python3 # - repo: "https://github.com/psf/black" # rev: "21.9b0" # hooks: # - id: "black" # language_version: python3 # name: "Format code (black)" # exclude: '(^docs/.*|.*tests/.*|.*testing/.*|.*test.py$)' #- repo: "https://github.com/asottile/blacken-docs" #rev: "v1.11.0" #hooks: #- id: "blacken-docs" #name: "Format docs (blacken-docs)" #args: ["-l", "64"] #additional_dependencies: #- "black==21.9b0" #- repo: https://github.com/PyCQA/doc8 #rev: 0.9.1 #hooks: #- id: doc8 #args: #- '--max-line-length=90' #- '--ignore=D001' - repo: https://github.com/pre-commit/pygrep-hooks rev: v1.9.0 hooks: - id: rst-backticks # exclude: ChangeLog\.rst$ - id: rst-directive-colons - id: rst-inline-touching-normal - repo: https://github.com/pre-commit/mirrors-mypy rev: v0.910-1 hooks: - id: mypy args: - --follow-imports=normal - --install-types - --non-interactive files: ^chiptools.*\.py$ - repo: https://github.com/myint/autoflake rev: v1.4 hooks: - id: autoflake files: ^chiptools.*\.py$ exclude: '(.*testing/.*)' args: - --in-place - --remove-duplicate-keys - --remove-unused-variables #- repo: https://gitlab.com/pycqa/flake8 #rev: 3.9.2 #hooks: #- id: flake8 #files: ^chiptools.*\.py$ #exclude: '(.*testing/.*)' #additional_dependencies: ["flake8-bugbear"] #- repo: https://github.com/PyCQA/bandit #rev: 1.7.0 #hooks: #- id: bandit ##args: ["-ll", "-b", "bandit_baseline.json"] #- repo: https://github.com/PyCQA/pylint #rev: v2.11.1 #hooks: #- id: pylint #exclude: '(^docs/.*|.*tests/.*|.*testing/.*|.*test.py$)' #args: #[ #"-rn", #"-sn", #"--fail-under=5.75", #] - repo: https://github.com/lovesegfault/beautysh rev: v6.2.1 hooks: - id: beautysh ci: autofix_commit_msg: | [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci autofix_prs: false autoupdate_commit_msg: '[pre-commit.ci] pre-commit autoupdate' autoupdate_schedule: weekly skip: [] submodules: false # re-running a pull request: you can trigger a re-run on a pull request by # commenting pre-commit.ci run (must appear on a line by itself). # skipping push runs: skip a run by putting [skip ci], [ci skip], # [skip pre-commit.ci], or [pre-commit.ci skip] in the commit message.
<reponame>oswaldlo1/snitch package: name: axi_riscv_atomics authors: ["<NAME> <<EMAIL>>", "<NAME> <<EMAIL>>"] dependencies: axi: { git: "https://github.com/pulp-platform/axi.git", version: 0.35.1 } common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.11.0 } common_verification: { git: "https://github.com/pulp-platform/common_verification.git", version: 0.2.1 } sources: # Source files grouped in levels. Files in level 0 have no dependencies on files in this package. # Files in level 1 only depend on files in level 0, files in level 2 on files in levels 1 and 0, # etc. Files within a level are ordered alphabetically. # Level 0 - src/axi_res_tbl.sv - src/axi_riscv_amos_alu.sv # Level 1 - src/axi_riscv_amos.sv - src/axi_riscv_lrsc.sv # Level 2 - src/axi_riscv_atomics.sv - src/axi_riscv_lrsc_wrap.sv # Level 3 - src/axi_riscv_amos_wrap.sv - src/axi_riscv_atomics_wrap.sv - target: test files: - test/tb_axi_pkg.sv - test/golden_memory.sv - test/axi_riscv_atomics_tb.sv - test/axi_riscv_lrsc_tb.sv - target: synth_test files: - test/axi_riscv_lrsc_synth.v
<gh_stars>1-10 name: Build on: [push] jobs: build: runs-on: ubuntu-latest strategy: max-parallel: 4 matrix: python-version: [3.6, 3.7, 3.8] steps: - uses: actions/checkout@v1 - name: Cache iverilog uses: actions/cache@v1 env: cache-name: cache-iverilog with: path: ./iverilog key: ${{ runner.os }}-build${{ env.cache-name }} - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install -r ci/requirements.txt #sudo apt-get install iverilog sudo apt-get install -y -qq flex bison gperf sudo bash ci/install_iverilog.sh - name: Install uvm-python package run: | python -m pip install --user . - name: Lint with flake8 run: | python -m pip install flake8 # stop the build if there are Python syntax errors or undefined names flake8 ./src --count --select=E9,F63,F7,F82 --show-source --statistics # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide flake8 ./src --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - name: Test with make test run: | export SIM=icarus export PYTHONPATH=`pwd` ln -s src/uvm uvm make test
<reponame>e-matthews/cva5<gh_stars>1-10
stages:
  - build-toolchain
  - embench
  - compliance-test
  - dhrystone
  - coremark
  #- util_timing_rpt

build-toolchain:
  tags:
    - new-taiga-ci
  stage: build-toolchain
  script:
    - echo $CI_PROJECT_NAMESPACE
    - echo $CI_PROJECT_PATH
    - echo $CI_PROJECT_ROOT_NAMESPACE
    - echo $CI_PROJECT_TITLE
    - module load LANG/PYTHON/3.7.6
    - pip3 install meson
    # create the taiga project file hierarchy
    - cd ..
    - rm -rf taiga-project
    - cp -r $CI_PROJECT_TITLE taiga
    #- cp -r project-0 taiga # only used for local tests
    - git clone https://gitlab.com/sfu-rcl/taiga-project.git
    - mv -f taiga taiga-project/
    - cd taiga-project
    - source settings.sh
    - git clone https://github.com/gcc-mirror/gcc.git --branch releases/gcc-10 --single-branch tool-chain/gcc
    - git clone https://gitlab.com/sfu-rcl/taiga-picolibc.git --branch master --single-branch tool-chain/picolibc
    - git clone http://git.veripool.org/git/verilator --branch master --single-branch tool-chain/verilator
    - git clone https://sourceware.org/git/binutils-gdb.git --branch binutils-2_35-branch --single-branch tool-chain/binutils-gdb
    - git clone https://sourceware.org/git/newlib-cygwin.git --branch master --single-branch tool-chain/newlib-cygwin
    - git clone https://gitlab.com/sfu-rcl/taiga-embench.git --branch taiga-picolibc --single-branch benchmarks/embench
    - git clone https://gitlab.com/sfu-rcl/taiga-riscv-compliance.git --branch taiga-sim --single-branch benchmarks/riscv-compliance
    - git clone https://gitlab.com/sfu-rcl/taiga-dhrystone.git --branch master --single-branch benchmarks/taiga-dhrystone
    - git clone https://gitlab.com/sfu-rcl/taiga-coremark.git --branch master --single-branch benchmarks/taiga-coremark
    - cd tool-chain/gcc
    - ./contrib/download_prerequisites
    - cd $TAIGA_PROJECT_ROOT
    - ./build-tool-chain.sh
    - cp -rf $TAIGA_PROJECT_ROOT/tool-chain $CI_PROJECT_DIR #needed for caching

embench:
  tags:
    - new-taiga-ci
  stage: embench
  script:
    - pip3 install pandas
    - cd ..
    - cd taiga-project
    - source settings.sh
    - SCRIPT_DIR=$TAIGA_PROJECT_ROOT/tool-chain/scripts
    - bash -x $SCRIPT_DIR/embench.sh
    - python3 $SCRIPT_DIR/embench-check.py

compliance-test:
  tags:
    - new-taiga-ci
  stage: compliance-test
  script:
    - cd ..
    - cd taiga-project
    - source settings.sh
    - SCRIPT_DIR=$TAIGA_PROJECT_ROOT/tool-chain/scripts
    - bash -x $SCRIPT_DIR/compliance.sh
    - make run-compliance-tests-verilator
  allow_failure: true

dhrystone:
  tags:
    - new-taiga-ci
  stage: dhrystone
  script:
    - cd ..
    - cd taiga-project
    - source settings.sh
    - SCRIPT_DIR=$TAIGA_PROJECT_ROOT/tool-chain/scripts
    - bash -x $SCRIPT_DIR/dhrystone.sh

coremark:
  tags:
    - new-taiga-ci
  stage: coremark
  script:
    - cd ..
    - cd taiga-project
    - source settings.sh
    - SCRIPT_DIR=$TAIGA_PROJECT_ROOT/tool-chain/scripts
    - bash -x $SCRIPT_DIR/coremark.sh
  allow_failure: true

#util_timing_rpt-test:
  #tags:
  #- new-taiga-ci
  #stage: util_timing_rpt
  #script:
  #- source settings.sh
  #- cd ..
  #- cd taiga-project
  #- git clone https://gitlab.com/sfu-rcl/taiga-coremark.git --branch master --single-branch benchmarks/taiga-coremark
  #- source /exports/rcl-tools/Vivado/Vivado/2019.2/settings64.sh
  #- bash taiga/tools/PPA.sh
<reponame>tzaumiaan/vhdl_repo<gh_stars>0 src_list: rtl: - cordic_core.vhd - cordic_lzsh.vhd - cordic_top.vhd tb: - tb_pkg.vhd - tb.vhd submodules: null sim: top_name: tb pat_in: pat_in.txt pat_out: pat_out.txt dut_out: dut_out.txt pat_gen_script: pat_gen_cordic pat_comp_script: pat_comp_cordic fixed_cases: - basic generated_cases: vector_fixed_ampl: timeout: "20 ms" n_pat: 72000 cordic_mode: vector ampl: {mode: fixed, value: 0x2000} vector_random_ampl_normal: timeout: "20 ms" n_pat: 72000 cordic_mode: vector ampl: {mode: random, range: [0x2000, 0x3fff], seed: 123} vector_random_ampl_small: timeout: "20 ms" n_pat: 72000 cordic_mode: vector ampl: {mode: random, range: [0x0fff, 0x1fff], seed: 321} rotate_fixed_zero: timeout: "1 ms" n_pat: 3600 cordic_mode: rotate ampl: {mode: random, range: [0x1000, 0x3fff], seed: 456} theta_init: {mode: fixed, value: 0x0000} rotate_fixed_ampl: timeout: "1 ms" n_pat: 3600 cordic_mode: rotate ampl: {mode: fixed, value: 0x3fff} theta_init: {mode: random, range: [0x0000, 0xffff], seed: 789} rotate_random: timeout: "1 ms" n_pat: 3600 cordic_mode: rotate ampl: {mode: random, range: [0x1000, 0x3fff], seed: 999} theta_init: {mode: random, range: [0x0000, 0xffff], seed: 888}
<filename>temp/capi/capi_action.mmio.yaml
meta:
  mnemonic: ACTION
  name: capi_action
  brief: MMIO-Map Action Space.
  doc: |
    Each action has a 4KB MMIO space which can be accessed from a master
    context or from a slave context using the address ranges as described
    in the respective MMIO-Map above.

    The following MMIO registers have dedicated meanings within the SNAP
    framework. When the Action gets created using Xilinx HLS, the registers
    marked as "Reserved" are required for HLS control information.
    Otherwise, they may be used for any purpose.

interface:
  port-flatten: yes

fields:
  - base: 0x000
    register-mnemonic: ACR
    register-name: action_control
    register-brief: Action Control Register.
    interface:
      port-group: control
    subfields:
      - address: 0:7
        type: control
        mnemonic: AR
        name: auto_restart
        brief: Automatic restart.
      - address: 0:3
        type: status
        mnemonic: RDY
        name: ready
        brief: Ready flag.
      - address: 0:2
        type: status
        mnemonic: IDLE
        name: idle
        brief: Idle flag.
      - address: 0:1
        type: status
        mnemonic: DONE
        name: done
        brief: Done flag.
      - address: 0:0
        type: control
        ctrl-clear: yes
        mnemonic: STA
        name: start
        brief: Start flag.

  - base: 0x004
    register-mnemonic: IER
    register-name: interrupt_enable
    register-brief: Interrupt Enable Register.
    interface:
      port-group: control
    subfields:
      - address: 0:0
        type: control
        mnemonic: IEN
        name: interrupt_enable
        brief: Interrupt enable.

  - address: 0x010
    type: config
    mnemonic: ATR
    name: action_type
    brief: Action Type Register.
    doc: |
      Unique four-byte number specifying the type (the implemented
      functionality) of this action.

  - address: 0x014
    type: config
    mnemonic: AVR
    name: action_version
    brief: Action Version Register.
    doc: |
      Four-byte number specifying the version of this action.

  - address: 0x020:7..0
    type: control
    mnemonic: CIDR
    name: context_id
    brief: Context ID Register.
    interface:
      port-group: control
    doc: |
      Context ID to be passed with DMA and Interrupt commands to the host.

  - address: 0x800/11
    type: axi
    name: user
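# NOTE (hypothetical sketch): an additional status bit in the action_control
# register would follow the same subfield schema as above; the bit position,
# mnemonic, and name below are illustrative:
#
#   - address: 0:4
#     type: status
#     mnemonic: ERR
#     name: error
#     brief: Error flag.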
# @package _global_
defaults:
  - override /do_blink/fig1a@do_blink.fig.fig1a: []
  - override /do_blink/fig2a@do_blink.fig.fig2a: []
  - override /do_blink/spam_filter@do_blink.fig.spam_filter: []
  - override /do_blink/digit_recognition@do_blink.fig.digit_recognition: []
  - override /do_blink/rendering@do_blink.fig.rendering: ["coloringFB_top_m"]
  - override /do_blink/overlay@do_blink.fig.overlay: []
  - override /do_blink/rendering_4_page@do_blink.fig.rendering_4_page: []

# - override /hydra/launcher: submitit_slurm

# hydra:
#   launcher:
#     cpus_per_task: 8
#     mem_per_cpu: 7500mb
#     nodes: 1
# Site settings title: Creative Commons Labs email: <EMAIL> description: > # this means to ignore newlines until "baseurl:" Website of the Creative Commons tech team. baseurl: "" # the subpath of your site, e.g. /blog/ url: "https://labs.creativecommons.org" # the base hostname & protocol for your site twitter_username: creativecommons github_username: creativecommons # Build settings markdown: kramdown