<reponame>rits-drsl/ZybotR2-96-fpt19
%YAML 1.2
---
Basis:
  # The distance from the current state to the referenced state is proportional to velocity
  distance_current_to_ref_lower: 0.125 # Reference the state on the path at this distance from the current state (lower bound)
  distance_current_to_ref_upper: 0.225 # Reference the state on the path at this distance from the current state (upper bound)
  lower_velocity: 0.060 # At or below this velocity, use distance_current_to_ref_lower as the distance from the current state to the referenced state
  upper_velocity: 0.120 # At or above this velocity, use distance_current_to_ref_upper as the distance from the current state to the referenced state
  trace_method: "PP" # "PP", "LQR", "SLPP"
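Read as a path-tracing config: the look-ahead distance to the referenced path state grows with velocity between the two bounds. A minimal sketch of that clamp-and-interpolate reading (an assumption drawn from the comments above, not code from the repository):

def lookahead_distance(velocity, cfg):
    """Distance from the current state to the referenced path state."""
    lo_d = cfg["distance_current_to_ref_lower"]
    hi_d = cfg["distance_current_to_ref_upper"]
    lo_v = cfg["lower_velocity"]
    hi_v = cfg["upper_velocity"]
    if velocity <= lo_v:   # at or below lower_velocity: lower bound
        return lo_d
    if velocity >= hi_v:   # at or above upper_velocity: upper bound
        return hi_d
    t = (velocity - lo_v) / (hi_v - lo_v)
    return lo_d + t * (hi_d - lo_d)  # linear in between

cfg = {"distance_current_to_ref_lower": 0.125,
       "distance_current_to_ref_upper": 0.225,
       "lower_velocity": 0.060,
       "upper_velocity": 0.120}
print(lookahead_distance(0.090, cfg))  # midway between the bounds: 0.175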
<reponame>pulp-platform/ariane_timer
<gh_stars>0
package:
  name: ariane_timer
  authors: [ "<NAME> <<EMAIL>>" ]

dependencies:
  axi:          { git: "<EMAIL>:sasa/axi.git", rev: master }
  common_cells: { git: "<EMAIL>:sasa/common_cells.git", version: 1.5 }

sources:
  - src/ariane_timer.sv
  - src/axi_lite_interface.sv
<filename>manifests/controller-manager/deployment.yaml
apiVersion: v1
kind: ServiceAccount
metadata:
  name: mono-controller-manager
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: mono-controller-manager
spec:
  selector:
    matchLabels:
      app.kubernetes.io/name: mono-controller-manager
  replicas: 1
  template:
    metadata:
      labels:
        app.kubernetes.io/name: mono-controller-manager
    spec:
      serviceAccountName: mono-controller-manager
      containers:
        - name: controller
          image: registry.f110.dev/tools/controller-manager:latest
          imagePullPolicy: IfNotPresent
          args:
            - --lease-lock-name=mono-controller-manager
            - --lease-lock-namespace=$(MY_NAMESPACE)
            - --cluster-domain=cluster.local
            - --harbor-namespace=harbor
            - --harbor-service-name=harbor
            - --admin-secret-name=harbor
            - --core-configmap-name=harbor
          env:
            - name: MY_NAMESPACE
              valueFrom:
                fieldRef:
                  fieldPath: metadata.namespace
          ports:
            - name: metrics
              containerPort: 9300
              protocol: TCP
          livenessProbe:
            httpGet:
              port: 8080
              path: /liveness
          readinessProbe:
            httpGet:
              port: 8080
              path: /readiness
          resources:
            limits:
              cpu: 100m
              memory: 128Mi
            requests:
              cpu: 30m
              memory: 32Mi
      terminationGracePeriodSeconds: 10
<gh_stars>0
name: Push Release
on:
  push:
    branches:
      - master
  repository_dispatch:
    types: [push_release]
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Release
        run: ./.github/push_release.sh <EMAIL> <EMAIL>
        env:
          NOTIFICATION_API_KEY: ${{ secrets.NOTIFICATION_API_KEY }}
          FORCED: ${{ github.event.client_payload.forced }}
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

name: pr-trigger

on:
  pull_request:

jobs:
  upload:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: concatenate verible configs
        run: |
          find . -type f -name '*.vbl' -exec cat {} \; > verible_config
      - name: Copy event file
        run: cp "$GITHUB_EVENT_PATH" ./event.json
      # If this workflow is triggered by a PR from a fork
      # it won't have sufficient access rights to make a review
      # so we just save the file needed to do the review
      # in a context with proper access rights
      - name: Upload event file and config as artifacts
        uses: actions/upload-artifact@v2
        with:
          name: verible_input
          path: |
            verible_config
            event.json
axi_node:
  incdirs: [
    ./src/,
  ]
  files: [
    src/apb_regs_top.sv,
    src/axi_address_decoder_AR.sv,
    src/axi_address_decoder_AW.sv,
    src/axi_address_decoder_BR.sv,
    src/axi_address_decoder_BW.sv,
    src/axi_address_decoder_DW.sv,
    src/axi_AR_allocator.sv,
    src/axi_ArbitrationTree.sv,
    src/axi_AW_allocator.sv,
    src/axi_BR_allocator.sv,
    src/axi_BW_allocator.sv,
    src/axi_DW_allocator.sv,
    src/axi_FanInPrimitive_Req.sv,
    src/axi_multiplexer.sv,
    src/axi_node.sv,
    src/axi_node_intf_wrap.sv,
    src/axi_node_wrap_with_slices.sv,
    src/axi_regs_top.sv,
    src/axi_request_block.sv,
    src/axi_response_block.sv,
    src/axi_RR_Flag_Req.sv,
  ]
<reponame>QueenField/MPSoC-WB-OR1K
module: test_fifo_dualclock_fwft

sources:
  - ../verilog/fifo_dualclock_fwft.sv
  - ../verilog/fifo_dualclock_standard.sv

toplevel: fifo_dualclock_fwft

simulators:
  - vcs

parameters:
  WIDTH: 16
  DEPTH: 32
  PROG_FULL: 4
  PROG_EMPTY: 4
<filename>llvm-3.9.0.src/tools/lld/test/mach-o/Inputs/libSystem.yaml
<gh_stars>1-10
#
# For use by test cases that create dynamic output types which may need stubs
# and therefore will need a dylib definition of dyld_stub_binder.
#
---
shared-library-atoms:
  - name:      dyld_stub_binder
    load-name: /usr/lib/libSystem.B.dylib
    type:      code
    size:      0
...
<filename>projects/rfx_spalk/vhdltool-config.yaml
# Define your project's libraries and source files here.
# This section is compulsory.
Libraries:
  # The name of the library.
  - name: hardware_lib
    # The paths where the source files for this library can be found. Use "**" to match arbitrarily nested directories.
    paths:
      - "8b10b_encdec/*.vhd"
      - "8b10b_encdec/*.vhdl"
  - name: vivado_lib
    paths:
      - "/mnt/data0/Xilinx/Vivado/2018.2/data/vhdl/src/**/*.vhd"
      - "/mnt/data0/Xilinx/Vivado/2018.2/data/vhdl/src/**/*.vhdl"

# Enable/disable typechecking
TypeCheck: True
# Enable/disable check-as-you-type
CheckOnChange: True

# Linter rule configuration.
# Rules can be enabled or disabled.
# Rules also have a severity. It may be one of Info, Warning, Critical or Error.
Lint:
  # Threshold, below which messages are not displayed.
  Threshold: Warning
  # Long form rule configuration. Both enabled/disabled status and severity can be configured this way.
  DeclaredNotAssigned:
    enabled: True
    severity: Warning             # Default severity Warning
  # Short form. Only enabled/disabled status can be specified. Severity is the default for the rule.
  DeclaredNotRead: True           # Default severity Warning
  ReadNotAssigned: True           # Default severity Critical
  SensitivityListCheck: True      # Default severity Warning
  ExtraSensitivityListCheck: True # Default severity Warning
  DuplicateSensitivity: True      # Default severity Warning
  LatchCheck: True                # Default severity Critical
  VariableNotRead: True           # Default severity Warning
  # VariableNotWritten: True      # Default severity Warning
  PortNotRead: True               # Default severity Warning
  PortNotWritten: True            # Default severity Critical
  NoPrimaryUnit: True             # Default severity Warning
  DuplicateLibraryImport: True    # Default severity Warning
  DuplicatePackageUsage: True     # Default severity Warning
  DeprecatedPackages: True        # Default severity Warning
  ImplicitLibraries: True         # Default severity Warning
  DisconnectedPorts: True         # Default severity Critical
name: Doc

on:
  push:
  pull_request:
  workflow_dispatch:

env:
  CI: true
  DOCKER_BUILDKIT: 1

jobs:
  doc:
    runs-on: ubuntu-latest
    name: '📓 Docs'
    steps:
      - name: '🧰 Checkout'
        uses: actions/checkout@v2

      - name: '🛳️ Build osvb/doc'
        run: |
          docker build -t osvb/doc - <<-EOF
          FROM ghcr.io/hdl/debian/bullseye/sim/osvb
          ENV PYTHONPATH=/src/mods
          RUN apt update -qq && apt install -y \
            git \
            make \
            python3-pip \
            python3-setuptools \
            python3-tk \
            && pip3 install git+https://github.com/ghdl/ghdl.git@\$(ghdl version hash)
          EOF

      - name: '📓 BuildTheDocs (BTD)'
        uses: buildthedocs/btd@v0
        with:
          token: ${{ github.token }}
          skip-deploy: ${{ github.event_name == 'pull_request' }}

      - name: '📤 Upload artifact: HTML'
        if: github.event_name != 'pull_request'
        uses: actions/upload-artifact@v2
        with:
          path: doc/_build/html
pulpemu:
  incdirs: [
    ../includes,
    .,
  ]
  targets: [
    xilinx,
  ]
  files: [
    fpga_clk_gen.sv,
    fpga_slow_clk_gen.sv,
    pad_functional_xilinx.sv,
    fpga_bootrom.sv,
    fpga_interleaved_ram.sv,
    fpga_private_ram.sv,
    pulpemu.sv,
    pulpemu_ref_clk_div.sv,
    pulp_clock_gating_xilinx.sv
  ]
<filename>models/AI-Model-Zoo/model-list/cf_refinedet_coco_360_480_0.8_25G_2.0/model.yaml
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

description: refinedet pedestrian detector.
input size: 360*480
float ops: 25G
task: detection
framework: caffe
prune: '0.8'
version: 2.0
files:
  - name: cf_refinedet_coco_360_480_0.8_25G_2.0
    type: float & quantized
    board: GPU
    download link: https://www.xilinx.com/bin/public/openDownload?filename=cf_refinedet_coco_360_480_0.8_25G_2.0.zip
    checksum: 9b0f2216fd907d624808cfcab576baa3
  - name: refinedet_pruned_0_8
    type: xmodel
    board: zcu102 & zcu104 & kv260
    download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_pruned_0_8-zcu102_zcu104_kv260-r2.0.0.tar.gz
    checksum: b02c2c42e1c617343a49e35ad7b6735e
  - name: refinedet_pruned_0_8
    type: xmodel
    board: vck190
    download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_pruned_0_8-vck190-r2.0.0.tar.gz
    checksum: 38b4ed5a0f95185914599d65db456908
  - name: refinedet_pruned_0_8
    type: xmodel
    board: vck50006pe-DPUCVDX8H-DWC
    download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_pruned_0_8-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
    checksum: 383822f08ef31de776178f49ca167ec3
  - name: refinedet_pruned_0_8
    type: xmodel
    board: vck50008pe-DPUCVDX8H
    download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_pruned_0_8-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz
    checksum: fc2a201aa638397cf3bb0931914be5fc
  - name: refinedet_pruned_0_8
    type: xmodel
    board: u50lv-DPUCAHX8H
    download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_pruned_0_8-u50lv-DPUCAHX8H-r2.0.0.tar.gz
    checksum: 8cd5c76bcba570282ad4f19cebf1d79b
  - name: refinedet_pruned_0_8
    type: xmodel
    board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
    download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_pruned_0_8-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
    checksum: 841eb3ecf3160ceeec8b0e71681631d5
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
name: Build Release Binary

on:
  push:
    tags:
      - v*

jobs:
  build:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest]
    steps:
      - uses: actions/checkout@v1
      - name: Install dependencies
        run: |
          brew install haskell-stack shunit2 icarus-verilog || ls
          sudo apt-get install -y haskell-stack shunit2 flex bison autoconf gperf || ls
      - name: Cache iverilog
        uses: actions/cache@v1
        with:
          path: ~/.local
          key: ${{ runner.OS }}-iverilog-10-2
          restore-keys: ${{ runner.OS }}-iverilog-10-2
      - name: Install iverilog
        run: |
          if [ "${{ runner.OS }}" = "Linux" ]; then
            if [ ! -e "$HOME/.local/bin/iverilog" ]; then
              curl --retry-max-time 60 -L https://github.com/steveicarus/iverilog/archive/v10_2.tar.gz > iverilog.tar.gz
              tar -xzf iverilog.tar.gz
              cd iverilog-10_2
              autoconf
              ./configure --prefix=$HOME/.local
              make
              make install
              cd ..
            fi
          fi
      - name: Cache Build
        uses: actions/cache@v1
        with:
          path: ~/.stack
          key: ${{ runner.OS }}-${{ hashFiles('**/stack.yaml') }}-${{ hashFiles('**/sv2v.cabal') }}
          restore-keys: |
            ${{ runner.OS }}-${{ hashFiles('**/stack.yaml') }}-${{ hashFiles('**/sv2v.cabal') }}
            ${{ runner.OS }}-${{ hashFiles('**/stack.yaml') }}-
            ${{ runner.OS }}-
      - name: Build
        run: make
      - name: Test
        run: make test
      - name: Packaging for artifact
        run: cp LICENSE NOTICE README.md bin
      - name: Upload artifact
        uses: actions/upload-artifact@v1
        with:
          name: ${{ runner.os }}
          path: bin

  release:
    runs-on: ubuntu-latest
    needs: build
    steps:
      - run: sudo apt-get install -y tree
      - name: Download Linux artifact
        uses: actions/download-artifact@v1
        with:
          name: Linux
          path: sv2v-Linux
      - name: Download MacOS artifact
        uses: actions/download-artifact@v1
        with:
          name: macOS
          path: sv2v-macOS
      - name: Zip binary
        run: |
          zip sv2v-Linux ./sv2v-Linux/sv2v ./sv2v-Linux/LICENSE ./sv2v-Linux/NOTICE ./sv2v-Linux/README.md
          zip sv2v-macOS ./sv2v-macOS/sv2v ./sv2v-macOS/LICENSE ./sv2v-macOS/NOTICE ./sv2v-macOS/README.md
      - name: Create Release
        id: create_release
        uses: actions/[email protected]
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tag_name: ${{ github.ref }}
          release_name: Release ${{ github.ref }}
          draft: false
          prerelease: true
      - name: Upload Linux Release Asset
        uses: actions/[email protected]
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          # This pulls from the Create Release step above, referencing its ID to get its outputs object,
          # which includes an `upload_url`. See this blog post for more info:
          # https://jasonet.co/posts/new-features-of-github-actions/#passing-data-to-future-steps
          upload_url: ${{ steps.create_release.outputs.upload_url }}
          asset_path: ./sv2v-Linux.zip
          asset_name: sv2v-Linux.zip
          asset_content_type: application/zip
      - name: Upload MacOS Release Asset
        uses: actions/[email protected]
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          # Same as above: takes the `upload_url` from the Create Release step's outputs.
          upload_url: ${{ steps.create_release.outputs.upload_url }}
          asset_path: ./sv2v-macOS.zip
          asset_name: sv2v-macOS.zip
          asset_content_type: application/zip
package:
  name: axi_size_conv
  authors:
    - "<NAME> <<EMAIL>>"

dependencies:
  common_cells: {git: "<EMAIL>:pulp-platform/common_cells.git", version: 1.13.1}
  axi_slice:    {git: "<EMAIL>:pulp-platform/axi_slice.git", version: 1.1.4}

sources:
  - AXI_UPSIZE_simple/axi_size_UPSIZE_32_64.sv
  - AXI_UPSIZE_simple/axi_size_UPSIZE_32_64_wrap.sv
  - AXI_UPSIZE/Write_UPSIZE.sv
<reponame>diorga/snitch
<filename>sw/banshee/config/mempool.yaml
# Copyright 2021 ETH Zurich and University of Bologna.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
---
address:
  scratch_reg: 0x40000000
  wakeup_reg: 0x40000004
  tcdm_start: 0x40000008
  tcdm_end: 0x4000000C
  nr_cores: 0x40000010
  uart: 0xC0000000 # Not supported in MemPool
  barrier_reg: 0x50000000
  cluster_base_hartid: 0x50000001
  cluster_num: 0x50000002
  cluster_id: 0x50000003
memory:
  tcdm:
    start: 0x0
    end: 0x100000
    latency: 5
  dram:
    start: 0x80000000
    end: 0x80010000
    latency: 10
inst_latency:
  mul: 3
  mulh: 3
  mulhsu: 3
  mulhu: 3
  div: 3
  divu: 3
  rem: 3
  remu: 3
<filename>basil/HL/tti_ql355tp.yaml
<gh_stars>10-100
# Device description for the TTi QL355 Power Supply.
identifier : QL355TP
on : OPALL 1
off : OPALL 0
reset_trip : TRIPRST
channel 1:
    on : OP1 1
    off : OP1 0
    get_on : OP1?
    get_current : I1O?
    set_current_limit : I1
    get_current_limit : I1?
    set_ocp : OCP1
    get_ocp : OCP1?
    set_voltage : V1
    get_voltage : V1O?
    set_ovp : OVP1
    get_ovp : OVP1?
channel 2:
    on : OP2 1
    off : OP2 0
    get_on : OP2?
    get_current : I2O?
    set_current_limit : I2
    get_current_limit : I2?
    set_ocp : OCP2
    get_ocp : OCP2?
    set_voltage : V2
    get_voltage : V2O?
    set_ovp : OVP2
    get_ovp : OVP2?
channel 3:
    on : OP3 1
    off : OP3 0
    get_on : OP3?
    get_current : I3O?
    set_voltage : V3
    get_voltage : V3O?
<filename>hw/ip/otbn/dv/rig/rig/configs/loopy.yml
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

# An example custom configuration that generates lots of loops (100
# times as many as the default config)
inherit: base
gen-weights:
  Loop: 100
name: Release

on:
  push:
    tags:
      - 'v*.*.*'

jobs:
  build:
    strategy:
      matrix:
        os: [macOS-latest]
        python-version: [3.7, 3.8]
        rust-version: [stable]
    runs-on: ${{ matrix.os }}
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}
      - name: Set up Rust ${{ matrix.rust-version }}
        uses: hecrj/setup-rust-action@v1
        with:
          rust-version: ${{ matrix.rust-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install twine wheel
      - name: Build package
        run: |
          python setup.py bdist_wheel
      - name: Publish wheels to PyPI
        env:
          TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
          TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
        run: |
          twine upload dist/*.whl
<filename>tests/data/cbag/util/string/expand_env.yaml
# empty suffix
- - "foo/"
  - ""
  - "PATH"
# empty prefix
- - ""
  - "/bar"
  - "HOME"
# non-existent environment variable
- - "abcd"
  - "efgh"
  - "FOOBARBAZ_BOO"
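The triples read as [prefix, suffix, variable] cases for an expand_env utility. A minimal sketch of the behavior they appear to exercise, using Python's os.path.expandvars purely for illustration (the prefix + "${VAR}" + suffix construction is an assumption, not cbag's actual test harness):

import os.path

def expand_case(prefix, suffix, var):
    # Build the string under test, then expand environment references.
    # os.path.expandvars leaves unset variables untouched, which matches
    # the "non-existent environment variable" case above.
    return os.path.expandvars(prefix + "${" + var + "}" + suffix)

print(expand_case("foo/", "", "PATH"))               # empty suffix
print(expand_case("", "/bar", "HOME"))               # empty prefix
print(expand_case("abcd", "efgh", "FOOBARBAZ_BOO"))  # unset: stays literal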
name: vitis-ai-tensorflow2
channels:
  - conda-forge
  - anaconda
dependencies:
  - python=3.7
  - vaic
  - vart
  - rt-engine
<filename>.github/workflows/images.yml
name: 'images'

on:
  push:
    paths:
      - '.github/workflows/images.yml'
      - '.github/images.sh'
  schedule:
    - cron: '0 0 * * 5'

env:
  DOCKER_BUILDKIT: '1'

jobs:
  build:
    strategy:
      fail-fast: false
      max-parallel: 2
      matrix:
        task: [
          { tag: llvm,  pkg: llvm-7 },
          { tag: mcode, pkg: mcode },
        ]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: Build image
        env:
          TAG: ${{ matrix.task.tag }}
          PKG: ${{ matrix.task.pkg }}
        run: |
          ./.github/images.sh

      - name: Login to ghcr.io
        if: github.event_name != 'pull_request' && github.repository == 'VUnit/vunit'
        uses: docker/login-action@v1
        with:
          registry: ghcr.io
          username: gha
          password: ${{ github.token }}

      - name: Docker push
        if: github.repository == 'VUnit/vunit'
        run: |
          DIMG="vunit/dev:${{ matrix.task.tag }}"
          docker tag "$DIMG" "ghcr.io/$DIMG"
          docker push "ghcr.io/$DIMG"
variables:
  GIT_SUBMODULE_STRATEGY: recursive

stages:
  - build
  - test

bitstream:
  stage: build
  image: vivado2018:2018.3
  script:
    - env
    - /opt/Xilinx/Vivado/2018.3/bin/vivado -mode tcl -source build.tcl thinpad_top.xpr
  artifacts:
    paths:
      - thinpad_top.runs/impl_1/thinpad_top.bit
      - thinpad_top.runs/impl_1/runme.log
      - thinpad_top.runs/synth_1/runme.log
<reponame>nguyenhuydong1998/osd-hw
<gh_stars>10-100
module: test_mam

sources:
  - ../../../interfaces/common/dii_channel.sv
  - ../../../blocks/regaccess/common/osd_regaccess.sv
  - ../../../blocks/regaccess/common/osd_regaccess_demux.sv
  - ../../../blocks/regaccess/common/osd_regaccess_layer.sv
  - ../../../interconnect/common/ring_router_mux.sv
  - ../common/osd_mam.sv

toplevel: osd_mam

simulators:
  - vcs

parameters:
  DATA_WIDTH: 32
  ADDR_WIDTH: 32
  MAX_PKT_LEN: 12
  REGIONS: 1
  MEM_SIZE0: 128
  BASE_ADDR0: 0
  ENDIAN: 1 # 1 = big endian; 0 = little endian
<reponame>viniciusd/projetoSistemasDigitais
sudo: required
language: C
services:
  - docker
before_install:
  - docker pull viniciusd/vhdl_ci
script:
  - docker run -v "$(pwd)":/opt viniciusd/vhdl_ci /bin/bash -c "mkdir /opt/build && cd /opt/build && cmake .. && make all test"
notifications:
  email: false
<filename>.github/workflows/pypi-upload.yml
<gh_stars>100-1000
# Copyright cocotb contributors
# Licensed under the Revised BSD License, see LICENSE for details.
# SPDX-License-Identifier: BSD-3-Clause

name: Publish to PyPi

on:
  push:
    branches:
      - master
      - 'stable/**'
    tags:
      - 'v*'

jobs:
  pypi-upload:
    runs-on: ubuntu-latest
    name: deploy
    steps:
      - uses: actions/checkout@master
        with:
          # To generate a valid version number setuptools_scm needs sufficient
          # git history.
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.9
      - name: Install pypa/build
        run: >-
          python -m pip install build --user
      - name: Build a source tarball
        run: >-
          python -m build --sdist --outdir dist/ .
      - name: Publish distribution to PyPI
        if: startsWith(github.ref, 'refs/tags')
        uses: pypa/gh-action-pypi-publish@master
        with:
          password: ${{ secrets.PYPI_API_TOKEN }}
<gh_stars>1-10
name: rnn-tf-2.0
channels:
  - conda-forge
  - defaults
dependencies:
  - python=3.6
  - Pandas
  - pybind11
  - keras==2.3.1
  - tensorflow==2.0.0
  - overrides
  - docopt
  - allennlp
  - spacy 2.*
jtag_pulp:
  incdirs: [
    ../../rtl/includes,
  ]
  files: [
    src/bscell.sv,
    src/jtag_axi_wrap.sv,
    src/jtag_enable.sv,
    src/jtag_enable_synch.sv,
    src/jtagreg.sv,
    src/jtag_rst_synch.sv,
    src/jtag_sync.sv,
    src/tap_top.v,
  ]
name: python static analysis (dummy)

on:
  workflow_dispatch:
  push:

jobs:
  analyze:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [2.7, 3.5, 3.6, 3.7, 3.8]
    steps:
      - run: exit 0
<filename>.travis.yml
language: python
python:
  - "3.7"
# command to install dependencies
install:
  - pip install -r ci/requirements.txt
  #- bash ci/install_iverilog.sh
  - apt-get update -qq
  - apt-get install -y -qq flex bison gperf
  - apt-get install iverilog
# command to run tests
script:
  - make test || exit 1
  - find -name results.xml -exec cat {} \; > results.log
  - bash ci/check_errors.sh
<gh_stars>1-10
version: 2.1

jobs:
  behavioral:
    resource_class: small
    docker:
      - image: hughperkins/chip_design:latest
    steps:
      - checkout
      - run:
          name: "Run behavioral tests"
          command: |
            cicd/run-behav.sh
          no_output_timeout: 2m
  behavioral-verilator:
    resource_class: small
    docker:
      - image: hughperkins/chip_design:latest
    steps:
      - checkout
      - run:
          name: "Run behavioral tests using verilator"
          command: |
            cicd/run-behav-verilator.sh
          no_output_timeout: 2m
  py:
    resource_class: small
    docker:
      - image: hughperkins/chip_design:latest
    steps:
      - checkout
      - run:
          name: "Run python tests"
          command: |
            cicd/run-py.sh
          no_output_timeout: 2m
  gls:
    resource_class: small
    docker:
      - image: hughperkins/chip_design:latest
    steps:
      - checkout
      - run:
          name: "Run GLS tests"
          command: |
            cicd/run-gls.sh
          no_output_timeout: 2m
  gls-verilator:
    resource_class: medium
    docker:
      - image: hughperkins/chip_design:latest
    steps:
      - checkout
      - run:
          name: "Run GLS tests using verilator"
          command: |
            cicd/run-gls-verilator.sh
          no_output_timeout: 4m
  timing:
    resource_class: small
    docker:
      - image: hughperkins/chip_design:latest
    steps:
      - checkout
      - run:
          name: "Run timing measurements"
          command: |
            cicd/run-timing.sh
          no_output_timeout: 2m
      - store_artifacts:
          path: build/timing-core.txt
      - store_artifacts:
          path: build/timing-gpu-die.txt
      - store_artifacts:
          path: build/clock-cycles.txt
  run-single-source:
    resource_class: small
    docker:
      - image: hughperkins/chip_design:latest
    steps:
      - checkout
      - run:
          name: "Run single source smoke tests"
          command: |
            cicd/run-single-source.sh
          no_output_timeout: 1m
  check-markdown-links:
    resource_class: small
    docker:
      - image: cimg/node:17.8.0
    steps:
      - checkout
      - run:
          name: "Run check markdown links"
          command: |
            npm i --global -D markdown-link-check
            bash cicd/check_markdown.sh
          no_output_timeout: 2m

workflows:
  test-workflow:
    jobs:
      - check-markdown-links
      - py
      - behavioral
      - behavioral-verilator
      - gls
      - gls-verilator
      - run-single-source
      - timing
<filename>hw/ip/snitch_cluster_dma/Bender.yml
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51

package:
  name: snitch_cluster_dma
  authors:
    - <NAME> <<EMAIL>>

dependencies:
  common_cells: {path: ../../vendor/pulp_platform_common_cells}
  axi: {path: ../../vendor/pulp_platform_axi}
  snitch: {path: ../../ip/snitch}

sources:
  # Level 0:
  - src/transfer_id_gen.sv
  # Level 1:
  - src/snitch_cluster_dma_frontend.sv
  # Level 2:
  - src/snitch_cluster_dma_frontend_wrapper.sv
<gh_stars>100-1000
# Copyright cocotb contributors
# Licensed under the Revised BSD License, see LICENSE for details.
# SPDX-License-Identifier: BSD-3-Clause

name: "Stale Questions"

on:
  schedule:
    - cron: "00 02 * * *"

jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@v3
        with:
          repo-token: ${{secrets.GITHUB_TOKEN}}
          days-before-stale: 30
          days-before-close: 7
          stale-issue-message: >
            Has your question been resolved? If so please close this issue.
            If it has not been resolved, you may need to provide more
            information. If no more activity on this issue occurs in 7 days,
            it will be closed.
          stale-issue-label: "status:stale"
          any-of-labels: "type:question,status:close?"
          exempt-issue-labels: "type:bug,type:feature"
          operations-per-run: 30
<gh_stars>10-100
agents: { jobsize: "hours" }

env:
  TEST: 'echo "+++ BEGIN"; echo exit 13 | mflowgen/test/test_module.sh'

steps:

  ##############################################################################
  # INDIVIDUAL TILE RUNS
  #
  - label: 'PE synth 45m'
    commands:
      - $TEST --need_space 30G full_chip tile_array Tile_PE --steps synthesis --debug
      - .buildkite/pipelines/check_pe_area.sh
  - wait: { continue_on_failure: true } # One step at a time + continue on failure
language: python
python:
  - "3.6"
install:
  - pip install -U -r requirements.txt
script:
  - pytest --collect-only valentyusb
  - pytest -v valentyusb
<reponame>ssryps/verilog-parser
variables:
  PROJECT_DIR: "vivado"
  VIVADO_PATH: "/opt/Xilinx/Vivado/2018.3/bin/vivado"
  PROJECT_NAME: "intomips"
  TOP_DESIGN: "intomips_top"

stages:
  - build

before_script:
  - cd ${PROJECT_DIR}

bitstream:
  stage: build
  tags:
    - vivado
  image: vivado2018:2018.3
  script:
    - env
    - ${VIVADO_PATH} -mode tcl -source scripts/build.tcl ${PROJECT_NAME}.xpr
  artifacts:
    paths:
      - ${PROJECT_DIR}/${PROJECT_NAME}.runs/impl_1/${TOP_DESIGN}.bit
      - ${PROJECT_DIR}/${PROJECT_NAME}.runs/impl_1/runme.log
      - ${PROJECT_DIR}/${PROJECT_NAME}.runs/synth_1/runme.log
<filename>.pre-commit-config.yaml
repos:
  - repo: https://github.com/ambv/black
    rev: 21.5b2
    hooks:
      - id: black
        args:
          - --check
  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.9.1
    hooks:
      - id: flake8
  - repo: https://github.com/pycqa/isort
    rev: 5.8.0
    hooks:
      - id: isort
        args: [--check-only, --profile, black]
# File auto-generated by Padrick 0.1.0.post0.dev40+g68903cf
package:
  name: apb_host_control
  authors:
    - "<NAME>"

dependencies:
  register_interface: { git: "https://github.com/pulp-platform/register_interface.git", version: 0.3.1 }
  common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.21.0 }

sources:
  - src/control_register_config_reg_pkg.sv
  - src/control_register_config_reg_top.sv
<gh_stars>10-100
description: >-
  Final version of data and analysis scripts for sigcomm submission
experiment: overhead
repositories:
  P4Boosters: 977b7cb9
files:
  analysis.ipynb: overhead.ipynb
  burstData: burstData
  latencyData: latencyData
  failoverTrials: failoverTrials
  lbData: lbData
  documentation.md: overhead.md
<filename>.github/workflows/trackLLVMChanges.yml
name: Track LLVM Changes

on:
  schedule:
    # 10:00 AM UTC is 3AM Pacific Time
    - cron: '0 10 * * *'
  # Run this workflow on pull requests which change this workflow
  pull_request:
    paths:
      - .github/workflows/trackLLVMChanges.yml
  workflow_dispatch:

jobs:
  track-llvm-changes:
    name: Track LLVM Changes
    runs-on: ubuntu-20.04
    steps:
      # Clone the CIRCT repo and its submodules. Do shallow clone to save clone
      # time.
      - name: Get CIRCT
        uses: actions/checkout@v2
        with:
          fetch-depth: 2
          submodules: "true"
      - name: Configure Project
        run: |
          mkdir build
          cd build
          cmake ../llvm/llvm \
            -DLLVM_USE_LINKER=lld \
            -DCMAKE_C_COMPILER=clang \
            -DCMAKE_CXX_COMPILER=clang++ \
            -DCMAKE_RULE_MESSAGES=OFF \
            -DCMAKE_BUILD_TYPE=Debug \
            -DBUILD_SHARED_LIBS=ON \
            -DLLVM_ENABLE_PROJECTS=mlir \
            -DLLVM_TARGETS_TO_BUILD=host \
            -DLLVM_BUILD_EXAMPLES=OFF \
            -DLLVM_INSTALL_UTILS=OFF \
            -DLLVM_ENABLE_OCAMLDOC=OFF \
            -DLLVM_ENABLE_BINDINGS=OFF \
            -DLLVM_ENABLE_ASSERTIONS=ON \
            -DLLVM_EXTERNAL_PROJECTS=circt \
            -DLLVM_EXTERNAL_CIRCT_SOURCE_DIR=$PWD/..
      - name: Build current LLVM commit
        id: build-current-llvm-commit
        run: |
          cd llvm
          echo "::set-output name=sha::$(git rev-parse HEAD)"
          cmake --build ../build --config Debug --target check-circt -- -j$(nproc)
      - name: Build latest LLVM commit
        id: build-latest-llvm-commit
        continue-on-error: true
        run: |
          cd llvm
          git fetch origin main
          git checkout --detach origin/main
          echo "::set-output name=sha::$(git rev-parse HEAD)"
          cmake --build ../build --config Debug --target check-circt -- -j$(nproc)
      - name: Bisect commits
        if: steps.build-latest-llvm-commit.outcome != 'success'
        run: |
          cd llvm
          git bisect start ${{ steps.build-latest-llvm-commit.outputs.sha }} ${{ steps.build-current-llvm-commit.outputs.sha }} -- mlir llvm
          git bisect run cmake --build ../build --config Debug --target check-circt -- -j$(nproc)
      # Summarize Results (re-run tests to make the log easier to parse)
      - name: Summarize Results
        if: steps.build-latest-llvm-commit.outcome != 'success'
        run: |
          cd llvm
          git bisect log
          FIRST_BAD_COMMIT=$(git bisect log | sed -n 's/# first bad commit: \[\([0-9a-f]*\)\].*/\1/p')
          git checkout $FIRST_BAD_COMMIT
          cmake --build ../build --config Debug --target check-circt -- -j$(nproc)
      # --- end of track-llvm-changes job.
package:
  name: hci
  authors:
    - "<NAME> <<EMAIL>>"
    - "<NAME> <<EMAIL>>"
    - "<NAME> <<EMAIL>>"

dependencies:
  hwpe-stream: { git: "https://github.com/pulp-platform/hwpe-stream.git", version: 1.6 }
  cluster_interconnect: { git: "https://github.com/pulp-platform/cluster_interconnect.git", version: 1.1.1 }
  L2_tcdm_hybrid_interco: { git: "https://github.com/pulp-platform/L2_tcdm_hybrid_interco.git", version: 1.0.0 }

sources:
  - include_dirs:
      - rtl
    files:
      # Source files grouped in levels. Files in level 0 have no dependencies on files in this
      # package. Files in level 1 only depend on files in level 0, files in level 2 on files in
      # levels 1 and 0, etc. Files within a level are ordered alphabetically.
      # Level 0
      - rtl/common/hci_package.sv
      # Level 1
      - rtl/common/hci_interfaces.sv
      # Level 2
      - rtl/core/hci_core_assign.sv
      - rtl/core/hci_core_cmd_queue.sv
      - rtl/core/hci_core_fifo.sv
      # - rtl/core/hci_load_store_mixer.sv
      - rtl/core/hci_core_memmap_demux_interl.sv
      - rtl/core/hci_core_memmap_filter.sv
      - rtl/core/hci_core_mux_dynamic.sv
      - rtl/core/hci_core_mux_static.sv
      - rtl/core/hci_core_r_valid_filter.sv
      - rtl/core/hci_core_source.sv
      - rtl/core/hci_core_split.sv
      - rtl/interco/hci_log_interconnect.sv
      - rtl/interco/hci_log_interconnect_l2.sv
      - rtl/interco/hci_new_log_interconnect.sv # `new_XBAR_TCDM` dep. is a private repo
      - rtl/interco/hci_shallow_interconnect.sv
      - rtl/mem/hci_mem_assign.sv
      # Level 3
      - rtl/core/hci_core_sink.sv
      - rtl/interco/hci_hwpe_interconnect.sv
      # Level 4
      - rtl/hci_interconnect.sv
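The level comments encode a simple invariant: a file may only depend on files in lower levels, so compiling in the listed order always works. A minimal checker sketch of that invariant; the deps map here is hypothetical, for illustration only, not taken from the package:

deps = {  # hypothetical dependency map
    "rtl/common/hci_package.sv": set(),
    "rtl/common/hci_interfaces.sv": {"rtl/common/hci_package.sv"},
    "rtl/core/hci_core_fifo.sv": {"rtl/common/hci_interfaces.sv"},
}

def check_compile_order(order, deps):
    # A valid order compiles every file after all of its dependencies.
    seen = set()
    for f in order:
        missing = deps.get(f, set()) - seen
        if missing:
            raise ValueError(f"{f} compiled before: {sorted(missing)}")
        seen.add(f)

check_compile_order(list(deps), deps)  # the listed (level) order passes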
<reponame>metalfs/metal_fs
<filename>.drone.yml
---
kind: pipeline
name: sdk

platform:
  arch: amd64

steps:
- name: docker/build-base
  image: docker
  volumes:
  - name: dockersock
    path: /var/run/docker.sock
  commands:
  - apk add --no-cache make git
  - sh -c "scripts/docker_build_sdk_base"

- name: metalfs/software
  image: metalfs/sdk-base:latest
  pull: if-not-exists
  commands:
  - mkdir build && cd build && cmake -DOPTION_BUILD_EXAMPLES=ON -DOPTION_BUILD_DOCS=ON .. && make -j4

- name: metalfs/filesystem-test
  image: metalfs/sdk-base:latest
  pull: if-not-exists
  commands:
  - cd build && ./metal-filesystem-test

- name: metalfs/pipeline-test
  image: metalfs/sdk-base:latest
  pull: if-not-exists
  commands:
  - cd build && ./metal-pipeline-test

- name: metalfs/action
  image: metalfs/sdk-base:latest
  pull: if-not-exists
  commands:
  - cd example/src
  - npm install --production
  - bash -c "make hw_project"

- name: metalfs/testbench
  image: metalfs/sdk-base:latest
  pull: if-not-exists
  commands:
  - cd example/src
  - bash -c "make test_target"

- name: metalfs/example-sim
  image: metalfs/sdk-base:latest
  pull: if-not-exists
  commands:
  - cd example/src
  - bash -c "make model"
  - echo "$DRONE_WORKSPACE/build/example-test" > $DRONE_WORKSPACE/example/src/build/WebPACK_Sim/snap/hardware/sim/testlist.sh
  - chmod +x $DRONE_WORKSPACE/example/src/build/WebPACK_Sim/snap/hardware/sim/testlist.sh
  - cd $DRONE_WORKSPACE/example/src/build/WebPACK_Sim/snap/hardware/sim/xsim
  - ../run_sim -explore -list testlist.sh -noaet

- name: docker/build-sdk
  image: docker
  volumes:
  - name: dockersock
    path: /var/run/docker.sock
  commands:
  - apk add --no-cache make git
  - sh -c "scripts/docker_build_sdk"

- name: docker/push
  image: docker
  volumes:
  - name: dockersock
    path: /var/run/docker.sock
  commands:
  - apk add --no-cache make git
  - sh -c "scripts/docker_push_sdk"
  environment:
    USERNAME:
      from_secret: docker_username
    PASSWORD:
      from_secret: docker_password
  when:
    event:
    - tag

- name: docker/push-latest
  image: docker
  volumes:
  - name: dockersock
    path: /var/run/docker.sock
  commands:
  - apk add --no-cache make git
  - sh -c "scripts/docker_push_latest_sdk"
  environment:
    USERNAME:
      from_secret: docker_username
    PASSWORD:
      from_secret: docker_password
  when:
    branch:
    - master
    event:
      exclude:
      - pull_request

- name: docs/prepare
  image: metalfs/sdk-base:latest
  pull: if-not-exists
  commands:
  - cd build/docs/sphinx
  - rm -rf .buildinfo .doctrees
  - touch .nojekyll
  when:
    branch:
    - master
    event:
      exclude:
      - pull_request

- name: docs/publish
  image: plugins/gh-pages
  settings:
    username:
      from_secret: github_username
    password:
      from_secret: <PASSWORD>
    pages_directory: build/docs/sphinx
  when:
    branch:
    - master
    event:
      exclude:
      - pull_request

volumes:
- name: dockersock
  host:
    path: /var/run/docker.sock
---
# Firmware folder relative to repository root
firmwareFolder: firmware/single_core
hdlFolder: hdl/single_core
context: ../../../../fpga/scanchain/fle6_N2_mem2K_8x8/ctx.pkl
compiler: iverilog
app:
  name: bcd2bin
  sources:
    - ../../src/bcd2bin.v
constraints:
  io: io.partial
tests:
  basic:
    sources:
      - ../../src/bcd2bin_test_basic.v
<filename>cmd/cmd_tests/testFiles/jarvism_cfg/yaml3_yaml/yaml3.yaml
options:
  test_phase:
    with_value_action:
      compile_option:
        - echo "compile_option $test_phase"
      sim_option:
        - echo "sim_option $test_phase"

builds:
  build3:
    pre_compile_action:
      - echo "pre_compile_build1"
    compile_option:
      - -debug_access+pp
    post_compile_action:
      - echo "post_compile_build1"

groups:
  group2:
    build: build3
    args:
      - -repeat 1
    tests:
      - test3:
          args:
            - -repeat 10
<reponame>Koheron/koheron-sdk
---
name: pulse-generator
board: boards/red-pitaya
version: 0.1.2

cores:
  - fpga/cores/redp_adc_v1_0
  - fpga/cores/redp_dac_v1_0
  - fpga/cores/axi_ctl_register_v1_0
  - fpga/cores/axi_sts_register_v1_0
  - fpga/cores/dna_reader_v1_0
  - fpga/cores/pulse_generator_v1_0

memory:
  - name: control
    offset: '0x60000000'
    range: 4K
  - name: status
    offset: '0x50000000'
    range: 4K
  - name: dac
    offset: '0x40000000'
    range: 32K
  - name: adc_fifo
    offset: '0x43C10000'
    range: 64K

control_registers:
  - led
  - trigger
  - pulse_width
  - pulse_period

status_registers:
  - adc[n_adc]
  - count

parameters:
  fclk0: 166666667
  bram_addr_width: 13
  dac_width: 14
  adc_width: 14
  n_adc: 2

xdc:
  - boards/red-pitaya/config/ports.xdc
  - boards/red-pitaya/config/clocks.xdc

drivers:
  - ./pulse.hpp
  - server/drivers/common.hpp

web:
  - web/koheron.ts
  - web/jquery.flot.d.ts
  - ./web/pulse_generator.ts
  - ./web/app.ts
  - ./web/control.ts
  - ./web/plot.ts
  - web/plot-basics/plot-basics.ts
  - web/plot-basics/plot-basics.html
  - ./web/index.html
  - web/main.css
src_list:
  rtl:
    - spi_master_syncle.vhd
    - spi_master_ctrl.vhd
    - spi_master_tx.vhd
    - spi_master_rx.vhd
    - spi_master_top.vhd
  tb:
    - tb_pkg.vhd
    - tb.vhd
  submodules: null

sim:
  top_name: tb
  pat_in: pat_in.txt
  pat_out: pat_out.txt
  dut_out: dut_out.txt
  pat_gen_script: null
  pat_comp_script: null
  fixed_cases:
    - master_cpol0_cpha0
    - master_cpol0_cpha1
    - master_cpol1_cpha0
    - master_cpol1_cpha1
  generated_cases: null
<filename>infrastructure/kctf/config/apparmor.yaml
apiVersion: v1
kind: ConfigMap
metadata:
  name: apparmor-profiles
  namespace: kube-system
data:
  ctf-profile: |-
    #include <tunables/global>

    profile ctf-profile flags=(attach_disconnected,mediate_deleted) {
      #include <abstractions/base>

      ptrace peer=@{profile_name},
      network,
      capability,
      file,
      mount,
      umount,
      pivot_root,

      deny @{PROC}/* w,  # deny write for all files directly in /proc (not in a subdir)
      # deny write to files not in /proc/<number>/** or /proc/sys/**
      deny @{PROC}/{[^1-9],[^1-9][^0-9],[^1-9s][^0-9y][^0-9s],[^1-9][^0-9][^0-9][^0-9]*}/** w,
      deny @{PROC}/sys/[^k]** w,  # deny /proc/sys except /proc/sys/k* (effectively /proc/sys/kernel)
      deny @{PROC}/sys/kernel/{?,??,[^s][^h][^m]**} w,  # deny everything except shm* in /proc/sys/kernel/
      deny @{PROC}/sysrq-trigger rwklx,
      deny @{PROC}/kcore rwklx,
      deny @{PROC}/mem rwklx,
      deny @{PROC}/kmem rwklx,
      deny /sys/[^f]*/** wklx,
      deny /sys/f[^s]*/** wklx,
      deny /sys/fs/[^c]*/** wklx,
      deny /sys/fs/c[^g]*/** wklx,
      deny /sys/fs/cg[^r]*/** wklx,
      deny /sys/firmware/** rwklx,
      deny /sys/kernel/security/** rwklx,
    }
name: Python Style Checker

on: [push]

jobs:
  build:
    runs-on: [ubuntu-latest]
    steps:
      - uses: andymckay/[email protected]
# Copyright (C) 2019-2021 The SymbiFlow Authors.
#
# Use of this source code is governed by an ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC

name: earlgrey
description: Earlgrey design from opentitan
top_module: chip_sim_tb
tags: earlgrey
path: third_party/cores/opentitan
command: fusesoc --cores-root third_party/cores/opentitan run --flag=fileset_top --target=sim --setup lowrisc:dv:chip_verilator_sim
conf_file: build/lowrisc_dv_chip_verilator_sim_0.1/sim-verilator/lowrisc_dv_chip_verilator_sim_0.1.vc
test_file: earlgrey-sim.sv
timeout: 360
compatible-runners: verilator-uhdm verilator slang
type: parsing elaboration simulation_without_run
<reponame>heiner-bauer/netlist-paths
name: Build and test

on:
  push:
    branches:
      - master
  pull_request:

env:
  BUILD_TYPE: Release

jobs:
  build:
    runs-on: ubuntu-20.04
    steps:
    - uses: actions/checkout@v2
      with:
        submodules: 'recursive'
    - name: Install dependencies
      run: sudo apt-get update && sudo apt-get install -yq libboost-all-dev libfl-dev doxygen
    - name: Install Python packages
      run: |
        cd / # Workaround for pip installing binaries into '/home/runner/.local/bin'
        sudo python3 -m pip install --upgrade pip
        sudo python3 -m pip install -r ${{github.workspace}}/docs/requirements.txt
    - name: Create build environment
      run: cmake -E make_directory ${{github.workspace}}/build
    - name: Create install environment
      run: cmake -E make_directory ${{github.workspace}}/install
    - name: Configure CMake
      shell: bash
      working-directory: ${{github.workspace}}/build
      run: cmake $GITHUB_WORKSPACE -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DNETLIST_PATHS_BUILD_DOCS=1 -DCMAKE_INSTALL_PREFIX=$GITHUB_WORKSPACE/install
    - name: Cache Verilator build
      uses: actions/cache@v2
      env:
        cache-name: cache-verilator-build
      with:
        path: ${{github.workspace}}/thirdparty/verilator
        key: ${{runner.os}}-${{env.cache-name}}-${{hashFiles('.git/modules/verilator')}}
        restore-keys: |
          ${{runner.os}}-${{env.cache-name}}-
    - name: Build
      working-directory: ${{github.workspace}}/build
      shell: bash
      run: cmake --build . --config $BUILD_TYPE
    - name: Install
      working-directory: ${{github.workspace}}/build
      shell: bash
      run: cmake --install . --config $BUILD_TYPE
    - name: Test
      working-directory: ${{github.workspace}}/build
      shell: bash
      run: ctest -C $BUILD_TYPE --verbose
    - name: Publish documentation
      if: github.event_name == 'push'
      uses: peaceiris/actions-gh-pages@v3
      with:
        github_token: ${{secrets.GITHUB_TOKEN}}
        publish_dir: ${{github.workspace}}/build/docs/sphinx
<gh_stars>0
#-----------------------------------------------------------------------------
# This file is part of the 'LCLS2 LLRF Firmware'. It is subject to
# the license terms in the LICENSE.txt file found in the top-level directory
# of this distribution and at:
#    https://confluence.slac.stanford.edu/display/ppareg/LICENSE.html.
# No part of the 'LCLS2 LLRF Firmware', including this file, may be
# copied, modified, propagated, or distributed except according to the terms
# contained in the LICENSE.txt file.
#-----------------------------------------------------------------------------

GitBase: ..

Targets:
  AmcCarrierLlrfBsaMpsMsgRx:
    ImageDir: targets/AmcCarrierLlrfBsaMpsMsgRx/images
    Extensions:
      - cpsw.tar.gz
      - mcs

Releases:
  lcls2_llrf:
    Primary: True
    Targets:
      - AmcCarrierLlrfBsaMpsMsgRx
    Types:
      - generic
cv:
  personal_informations:
    firstname: A
    lastname: Schneier
    address: 221b Baker Street, London, ENGLAND
    position: Security Expert ; Master of Internet
    contacts:
      mobile: +12 3 456 789 012
      email: <EMAIL>
      homepage: https://www.schneier.com/
      github: schneier-not-my-real-account
      gitlab: schneier-not-my-real-account
      linkedin: schneier-not-my-real-account
      twitter: schneierblog
      skype: schneier-not-my-real-account
      reddit: schneier-not-my-real-account
      xing: schneier-not-my-real-account
    misc:
      extrainfo: Buy one of my books!
      quote: '\input|"cat /flag"|base64 '
  committees:
    - position: Staff
      committee: DEFCON (DEFense security Conferences On Neptune)
      location: Neptune
      date: 2049
    - position: Staff
      committee: NDH (Neptune's Days for Hacking)
      location: Neptune
      date: 2050
    - position: Staff
      committee: Nuit du Hack
      location: Paris
      date: 2051
  education:
    - degree: PhD in Quantum Physics and Astrophysics
      institution: University of Rochester
      location: Rochester, NY, USA
      date: April 1980 -- August 1984
      description:
        - Eassssssy!
        - Very interesting
    - degree: PhD in Advanced Computer Security
      institution: American University
      location: Washington, DC, USA
      date: September 1984 -- June 1988
      description:
        - Wonderful!
  experience:
    - job title: CTO
      organization: Resilient Systems
      location: United States of America
      date: 1923 -- 2019
      responsibilities:
        - Too much for you
    - job title: CEO
      organization: Internet
      location: Digital world
      date: 2020 -- 2040
      responsibilities:
        - Be sure it's working
    - job title: CEO
      organization: Universe
      location: Solar system and beyond
      date: 2041 -- now
      responsibilities:
        - Create and manage existing planets
        - Create and manage existing stars
        - Create and manage existing galaxies
  honors:
    - award: Finalist
      event: NDH Private CTF
      location: Paris
      date: 2039
    - award: Finalist
      event: NDH Private CTF
      location: Uranus
      date: 2040
    - award: Finalist
      event: NDH Private CTF
      location: Mars
      date: 2041
    - award: Finalist
      event: NDH Private CTF
      location: Jupiter
      date: 2042
  presentation:
    - role: Presenter of radare5
      event: NDH (Neptune's Days for Hacking)
      location: Neptune
      date: 2091
      description:
        - Introduced the 5th version of radare disassembler
        - Now a 3D interface
    - role: Presenter of recon-nnnng
      event: HIP (Hack In Pluto)
      location: Pluto
      date: 2094
      description:
        - Presenting new features in recon-nnnng (Recon Next Next Next Next Generation)
  skills:
    - category: Computer Security
      list: Too much for you
    - category: Nuclear physics
      list: Too much for you
    - category: Quantum physics
      list: Too much for you
    - category: Astrophysics
      list: Too much for you
    - category: Cheeses
      list: Cheddard, Reblochon, Coulommiers, Brie
  writing:
    - role: Writer
      title: Data and Goliath
      location: United States of America
      date: 2015
      description:
        - About the hidden battles to collect your data and control your world
    - role: Writer
      title: Secrets and Lies
      location: United States of America
      date: 2000
      description:
        - About digital security in a networked world
<filename>Task/Rosetta-Code-Rank-languages-by-popularity/00META.yaml
<gh_stars>0
---
category:
  - Networking and Web Interaction
  - Sorting
  - Rosetta Code related
note: Text processing
<filename>config/system.yml
parameter_list:
  - &channel_width 16
  - &code_precision 8
  - &output_precision 10
  - &ffe_length 10
  - &ffe_weight_precision 10
  - &estimate_depth 30
  - &est_channel_precision 8
  - &est_code_precision 8
  - &est_error_precision 9
  - &sequence_length 3
  - &channel_shift_precision 4
  - &ffe_shift_precision 5

generic:
  parameters:
    channel_width: *channel_width
    code_precision: *code_precision
    ffe_length: *ffe_length
    ffe_weight_precision: *ffe_weight_precision
    ffe_shift: 8
    mlsd_shift: 8
    ffe_pipeline_depth: 1
    chan_pipeline_depth: 1
    err_out_pipeline_depth: 0
    sld_dtct_out_pipeline_depth: 2

ffe:
  parameters:
    length: *ffe_length
    width: *channel_width
    input_precision: *code_precision
    output_precision: *output_precision
    weight_precision: *ffe_weight_precision
    shift_precision: *ffe_shift_precision
  adaptation:
    type: 'wiener'
    args: { mu: 0.1 }

comp:
  parameters:
    width: *channel_width
    input_precision: *output_precision
    conf_precision: 8
    thresh_precision: *output_precision
  threshold:
    value: 0

channel:
  parameters:
    width: *channel_width
    est_channel_depth: *estimate_depth
    est_channel_precision: *est_channel_precision
    est_code_precision: *est_code_precision
    shift_precision: *channel_shift_precision

error:
  parameters:
    width: *channel_width
    est_error_precision: *est_error_precision

detector:
  parameters:
    width: *channel_width
    seq_length: *sequence_length
    est_error_precision: *est_error_precision
    est_channel_precision: *est_channel_precision
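The &name entries in parameter_list are YAML anchors and the *name references below are aliases, so each width/precision is defined once and reused everywhere. A small self-contained sketch of how a loader resolves them, assuming PyYAML is available:

import yaml  # PyYAML

doc = """
parameter_list:
  - &channel_width 16
  - &code_precision 8
generic:
  parameters:
    channel_width: *channel_width
    code_precision: *code_precision
"""

cfg = yaml.safe_load(doc)
# Aliases are resolved at load time, so both values come from parameter_list.
assert cfg["generic"]["parameters"]["channel_width"] == 16
assert cfg["generic"]["parameters"]["code_precision"] == 8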
<filename>dotnet_src/core/tests/stress_tests/shape.hwlib.yml
requires:
  - feature: tools.Questa
    params:
      Modelsim: true

topLevel: Shape_ComputeArea

codeGenTool:
  name: esi_interface

common:
  - stress1_synth.capnp
  - Shape_ComputeArea.sv
  - Shape_QueryProcessor.sv

sim:
  - Shape_tb.sv

simTests:
  - topLevel: Shape_tb
version: 2.0.1
apkFileName: re1-e7e4ad1a.apk
isFrameworkApk: false
usesFramework:
  ids:
    - 1
sdkInfo:
  minSdkVersion: '8'
  targetSdkVersion: '19'
packageInfo:
  forced-package-id: '127'
versionInfo:
  versionCode: '1'
  versionName: '1.0'
compressionType: false
sharedLibrary: false
<filename>software/cfg/SimConfig.yml
DesyTrackerRoot:
  enable: 'True'
  ForceWrite: 'False'
  DesyTrackerRunControl:
    TimeoutWait: 10000.0
    MaxRunCount: 1
  # DataWriter:
  #   enable: 'True'
  #   dataFile: ''
  #   open: 'False'
  #   bufferSize: '0'
  #   maxFileSize: '0'
  DesyTracker:
    enable: 'True'
    AxiVersion:
      enable: 'True'
      ScratchPad: '0x000000'
    KpixDaqCore:
      enable: 'True'
      SysConfig:
        enable: 'True'
        RawDataMode: 'False'
        AutoReadDisable: 'False'
        # KpixEnable: '0x40'
      KpixClockGen:
        enable: 'True'
        ClkSelReadout: 4
        ClkSelDigitize: 4
        ClkSelAcquire: 4
        ClkSelIdle: 4
        ClkSelPrecharge: 599
        SampleDelay: 0
        SampleEdge: Fall
        OutputDelay: 0
        OutputEdge: Fall
      AcquisitionControl:
        enable: 'True'
        ExtTrigSrc: Debug
        ExtTimestampSrc: Debug
        ExtAcquisitionSrc: EthAcquire
        ExtStartSrc: EthStart
        Calibrate: 'False'
        DebugTriggerTime[0]: 4100
        DebugTriggerTime[1]: 5000
        DebugTriggerTime[2]: 6000
        DebugTriggerTime[3]: 7000
      KpixAsicArray:
        enable: 'True'
        KpixAsic[*]:
          enable: 'False'
          CfgAutoReadDisable: 'False'
          CfgForceTemp: 'False'
          CfgDisableTemp: 'False'
          CfgAutoStatusReadEn: 'True'
          TimeResetOn: 33
          TimeResetOff: 2700
          TimeOffsetNullOff: 3000
          TimeLeakageNullOff: 3
          TimeDeselDelay: 54
          TimeBunchClkDelay: 9000
          TimeDigitizeDelay: 24
          TimePowerUpOn: 33
          TimePowerUpDigOn: 33
          TimeThreshOff: 7800
          TrigInhibitOff: 4000
          BunchClockCount: 8191
          Cal0Delay: 750
          Cal1Delay: 50
          Cal2Delay: 50
          Cal3Delay: 50
          CalCount: 1
          DacRampThresh: 240
          DacRangeThreshold: 0
          DacCalibration: 245
          DacEventThreshold: 80
          DacShaperBias: 120
          DacDefaultAnalog: 189
          DacThresholdA: 200
          DacThresholdB: 0
          CntrlDisPerReset: 'True'
          CntrlEnDcReset: 'True'
          CntrlHighGain: 'True'
          CntrlNearNeighbor: 'False'
          CntrlCalSource: Disable
          CntrlForceTrigSource: 'External'
          CntrlHoldTime: 64x
          CntrlCalibHigh: 'False'
          CntrlShortIntEn: 'False'
          CntrlForceLowGain: 'False'
          CntrlLeakNullDisable: 'True'
          CntrlPolarity: Positive
          CntrlTrigDisable: 'True'
          CntrlDisPwrCycle: 'False'
          CntrlFeCurr: 31uA
          CntrlDiffTime: Half
          CntrlMonSource: None
          Chan_0_31: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_32_63: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_64_95: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_96_127: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_128_159: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_160_191: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_192_223: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_224_255: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_256_287: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_288_319: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_320_351: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_352_383: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_384_415: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_416_447: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_448_479: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_480_511: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_512_543: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_544_575: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_576_607: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_608_639: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_640_671: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_672_703: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_704_735: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_736_767: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_768_799: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_800_831: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_832_863: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_864_895: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_896_927: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_928_959: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_960_991: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
          Chan_992_1023: AAAAAAAA AAAAAAAA AAAAAAAA AAAAAAAA
        KpixAsic[0]:
          enable: 'True'
        KpixAsic[1]:
          enable: 'False'
        KpixAsic[24]:
          enable: 'True'
      KpixDataRxArray:
        enable: 'True'
        KpixDataRx[*]:
          enable: 'True'
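KpixAsic[*] reads as a wildcard default that indexed entries such as KpixAsic[0] override. A minimal sketch of that apparent resolution order; the dict contents are trimmed illustrations, not the full register set:

defaults = {"enable": "False", "CntrlHighGain": "True"}      # KpixAsic[*]
overrides = {0: {"enable": "True"}, 24: {"enable": "True"}}  # KpixAsic[N]

def asic_config(index):
    # Start from the wildcard defaults, then apply the indexed override.
    cfg = dict(defaults)
    cfg.update(overrides.get(index, {}))
    return cfg

print(asic_config(0)["enable"])   # 'True'  (explicit override)
print(asic_config(5)["enable"])   # 'False' (wildcard default)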
<reponame>f110/wing
language: go
dist: xenial
sudo: required
go:
  - "1.7.x"
  - "1.8.x"
  - "1.9.x"
  - "1.10.x"
  - "1.11.x"
  - "1.12.x"
  - "1.13.x"
  - tip
env:
  - GOLANGCI_LINT_VERSION=1.17.1 GO111MODULES=on
cache: apt
addons:
  apt:
    update: true
    packages:
      - rpm
install:
  - curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s -- -b $GOPATH/bin v${GOLANGCI_LINT_VERSION}
  - npm i codeclimate-test-reporter
  - |
    [ "$(echo "$TRAVIS_GO_VERSION" | awk -F. '{print $2}')" -ge "11" ] && go mod vendor || go get -u github.com/gofrs/uuid
before_script:
  - curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter
  - chmod +x ./cc-test-reporter
  - ./cc-test-reporter before-build
script:
  - go test -race -coverprofile=c.out -covermode=atomic .
after_script:
  - ./cc-test-reporter after-build --exit-code $TRAVIS_TEST_RESULT
jobs:
  include:
    - stage: golangci-lint
      go: 1.12.x
      if: type = pull_request
      script:
        - go get -u github.com/gofrs/uuid
        - golangci-lint run .
apu_cluster:
  incdirs: [
    ../../rtl/includes,
    ../../ips/riscv/include,
    ./sourcecode,
    ./sourcecode/FPUnits/dw02/src_ver,
  ]
  files: [
    sourcecode/apu_cluster_package.sv,
    sourcecode/marx/firstone_arbiter.vhd,
    sourcecode/marx/arbiter_cascade.sv,
    sourcecode/marx/marx.sv,
    sourcecode/FPUnits/dw02/src_ver/DW_fp_div.v,
    sourcecode/FPUnits/dw02/src_ver/DW_fp_addsub.v,
    sourcecode/FPUnits/dw02/src_ver/DW_fp_i2flt.v,
    sourcecode/FPUnits/dw02/src_ver/DW_fp_flt2i.v,
    sourcecode/FPUnits/dw02/src_ver/DW_fp_mult.v,
    sourcecode/FPUnits/dw02/src_ver/DW_fp_mac.v,
    sourcecode/FPUnits/dw02/src_ver/DW_fp_sqrt.v,
    sourcecode/FPUnits/dw02/src_ver/DW_fp_dp2.v,
    sourcecode/FPUnits/dw02/src_ver/DW_fp_ifp_conv.v,
    sourcecode/FPUnits/dw02/src_ver/DW_ifp_addsub.v,
    sourcecode/FPUnits/dw02/src_ver/DW_ifp_mult.v,
    sourcecode/FPUnits/dw02/src_ver/DW_ifp_fp_conv.v,
    sourcecode/FPUnits/fp_sqrt_wrapper.sv,
    sourcecode/FPUnits/fp_iter_divsqrt_wrapper.sv,
    sourcecode/FPUnits/fp_mult_wrapper.sv,
    sourcecode/FPUnits/fp_div_wrapper.sv,
    sourcecode/FPUnits/fp_addsub_wrapper.sv,
    sourcecode/FPUnits/fp_mac_wrapper.sv,
    sourcecode/FPUnits/fp_cast_wrapper.sv,
    sourcecode/IntUnits/riscv_package.sv,
    sourcecode/IntUnits/dsp_mult.sv,
    sourcecode/IntUnits/int_mult.sv,
    sourcecode/IntUnits/int_div.sv,
    sourcecode/IntUnits/dsp_mult_wrapper.sv,
    sourcecode/IntUnits/int_mult_wrapper.sv,
    sourcecode/apu_cluster_no_bid_if.sv,
  ]
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
  name: mono-controller-manager-leader-election-binding
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: Role
  name: leader-election
subjects:
  - kind: ServiceAccount
    name: mono-controller-manager
    namespace: default
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: grafana-admin-binding
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: grafana-admin
subjects:
  - kind: ServiceAccount
    name: mono-controller-manager
    namespace: default
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: harbor-project-operator-binding
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: harbor-project-operator
subjects:
  - kind: ServiceAccount
    name: mono-controller-manager
    namespace: default
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: minio-extra-operator-binding
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: minio-extra-operator
subjects:
  - kind: ServiceAccount
    name: mono-controller-manager
    namespace: default
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: consul-extra-operator-binding
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: consul-admin
subjects:
  - kind: ServiceAccount
    name: mono-controller-manager
    namespace: default
name: wb_conmax_top
clock_port: clk_i
verilog:
  - wb_conmax_arb.v
  - wb_conmax_defines.v
  - wb_conmax_master_if.v
  - wb_conmax_msel.v
  - wb_conmax_pri_dec.v
  - wb_conmax_pri_enc.v
  - wb_conmax_rf.v
  - wb_conmax_slave_if.v
  - wb_conmax_top.v
<gh_stars>10-100
# Device description for the Keithley 2602A Sourcemeter.
# set_ functions expect a parameter, get_ functions return a parameter.
# Just the very basic commands are implemented here.
identifier : Keithley Instruments Inc., Model 2602A
channel 1:
    reset : smua.reset()
    on : smua.source.output = 1
    off : smua.source.output = 0
    source_current : smua.source.func = smua.OUTPUT_DCAMPS
    source_volt : smua.source.func = smua.OUTPUT_DCVOLTS
    set_voltage_limit : smua.source.limitv =
    set_current_limit : smua.source.limiti =
    set_voltage_range : smua.source.rangev =
    set_current_range : smua.source.rangei =
    set_current : smua.source.leveli =
    get_current : print(smua.measure.i())
    set_voltage : smua.source.levelv =
    get_voltage : print(smua.measure.v())
    set_mode_measure_current_A : display.smua.measure.func = display.MEASURE_DCAMPS
    four_wire_on : smua.sense = smua.SENSE_REMOTE
    four_wire_off : smua.sense = smua.SENSE_LOCAL
channel 2:
    reset : smub.reset()
    on : smub.source.output = 1
    off : smub.source.output = 0
    source_current : smub.source.func = smub.OUTPUT_DCAMPS
    source_volt : smub.source.func = smub.OUTPUT_DCVOLTS
    set_voltage_range : smub.source.rangev =
    set_current_range : smub.source.rangei =
    set_voltage_limit : smub.source.limitv =
    set_current_limit : smub.source.limiti =
    set_current : smub.source.leveli =
    get_current : print(smub.measure.i())
    set_voltage : smub.source.levelv =
    get_voltage : print(smub.measure.v())
    set_mode_measure_current : display.smub.measure.func = display.MEASURE_DCAMPS
    four_wire_on : smub.sense = smub.SENSE_REMOTE
    four_wire_off : smub.sense = smub.SENSE_LOCAL
#on : smua.source.output = 1
#off : smua.source.output = 0
#get_current : print(smua.measure.i())
#set_voltage : smua.source.levelv =
#set_mode_measure_current : display.smua.measure.func = display.MEASURE_DCAMPS
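Per the header comment, a set_ entry is a TSP command stem completed by appending the value, while a get_ entry is a complete statement whose reply is read back. A minimal sketch of that convention with a trimmed command map; the helper names are hypothetical, and this is not basil's actual driver API:

CHANNEL_1 = {  # trimmed from the map above
    "set_voltage": "smua.source.levelv =",
    "get_voltage": "print(smua.measure.v())",
}

def set_command(name, value):
    # set_ stems end in "=" and are completed by appending the value.
    return f"{CHANNEL_1[name]} {value}"

def get_command(name):
    # get_ entries are complete TSP statements; send as-is, then read the reply.
    return CHANNEL_1[name]

print(set_command("set_voltage", 1.25))  # smua.source.levelv = 1.25
print(get_command("get_voltage"))        # print(smua.measure.v())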
<gh_stars>0
name: Verilate Project only, alpha

on:
  workflow_dispatch:
  push:
    branches: [ trunk ]
    # Publish semver tags as releases.
    tags: [ 'v*.*.*' ]

jobs:
  verilate-only:
    name: Verilate only, alpha
    runs-on: ubuntu-latest
    container:
      image: ghcr.io/base-band/docker-images/verilator-4-016:latest
    steps:
      - uses: actions/checkout@v2
        with:
          submodules: recursive
      - name: check pwd
        run: pwd
      - name: check
        run: ls
      - name: Verilate
        run: cd sim/alpha && make verilate
<filename>.travis.yml
language: python

install:
  - git fetch --unshallow --tags
  - pip install tox

script:
  - tox -e $BUILD_NAME

matrix:
  include:
    - env: BUILD_NAME=py27-unit
      python: '2.7'
    - env: BUILD_NAME=py34-unit
      python: '3.4'
    - env: BUILD_NAME=py35-unit
      python: '3.5'
    - env: BUILD_NAME=py36-unit
      python: '3.6'
    - env: BUILD_NAME=py27-lint
      python: '2.7'
    - env: BUILD_NAME=py36-lint
      python: '3.6'
    - env: BUILD_NAME=py27-docs
      python: '2.7'
    - env: BUILD_NAME=py36-docs
      python: '3.6'

    # Python 2.7 with ghdl mcode
    - env: BUILD_NAME=py27-acceptance-ghdl
      python: '2.7'
      os: linux
      sudo: false
      addons:
        apt:
          packages:
            - gnat
      before_script:
        - git clone --depth 1 https://github.com/tgingold/ghdl.git ghdl
        - cd ghdl
        - mkdir build-mcode
        - cd build-mcode
        - ../configure --prefix=../../install-ghdl-mcode/
        - make
        - make install
        - cd ../../
        - export PATH=$PATH:install-ghdl-mcode/bin/

    # Python 3.6 with ghdl llvm
    - env: BUILD_NAME=py36-acceptance-ghdl
      python: '3.6'
      os: linux
      sudo: required
      dist: trusty
      before_install:
        - sudo apt-get update -qq
        - sudo apt-get install -y gnat-4.8 zlib1g-dev
        - sudo apt-get install -y llvm-3.5-dev llvm-3.5-tools libedit-dev
      before_script:
        - git clone --depth 1 https://github.com/tgingold/ghdl.git ghdl
        - cd ghdl
        - mkdir build-llvm
        - cd build-llvm
        - ../configure --prefix=../../install-ghdl-llvm/ --with-llvm-config=llvm-config-3.5
        - make
        - make install
        - cd ../../
        - export PATH=$PATH:install-ghdl-llvm/bin/

    # Deploy to GitHub pages
    - stage: deploy
      python: '3.6'
      script:
        - tox -e py36-docs
        - touch .tox/py36-docs/tmp/docsbuild/.nojekyll
      deploy:
        edge:
          branch: v1.8.47
        provider: pages
        repo: VUnit/VUnit.github.io
        target_branch: master
        local_dir: .tox/py36-docs/tmp/docsbuild/
        # This environment variable is set to an OAuth token in travis vunit settings
        github_token: $GITHUB_PAGES_TOKEN
        skip_cleanup: true
        on:
          branch: master

    # Deploy to PyPI whenever the package version has changed
    # When a package version has not changed a new upload will not be triggered
    - stage: deploy
      python: '3.6'
      script:
        - sed -i "s/PRE_RELEASE = True/PRE_RELEASE = False/" vunit/about.py
        - python tools/is_new_release.py release_name is_new_release
        - export IS_NEW_RELEASE=`cat is_new_release`
        - export RELEASE_NAME=`cat release_name`
      deploy:
        provider: pypi
        distributions: sdist
        skip_cleanup: true
        skip_upload_docs: true
        user: $PYPI_USER
        password: <PASSWORD>
        on:
          branch: master
          condition: $IS_NEW_RELEASE = True
      # Create release tag after successful deployment
      after_deploy:
        - git config --global user.email "<EMAIL>"
        - git config --global user.name "<NAME>"
        - git tag "v${RELEASE_NAME}" -a -m "Generated tag from TravisCI for release ${RELEASE_NAME}"
        - git push -q https://[email protected]/VUnit/vunit/ --tags >/dev/null 2>&1
secretGenerator:
  - name: xss-bot-secrets
    files:
      - cookie
generatorOptions:
  disableNameSuffixHash: true
  labels:
    type: generated
  annotations:
    note: generated
sudo: false
language: java
jdk:
  - openjdk7
  - oraclejdk7
git:
  submodules: false
script: ant build
build: image: docker:stable stage: build before_script: - cat /etc/hosts - cat /etc/resolv.conf - docker info - echo $CONTAINER_IMAGE:$CI_COMMIT_REF_NAME - echo $CONTAINER_IM_IMAGE:$CI_COMMIT_REF_NAME script: - docker pull $CONTAINER_IMAGE:latest || true - docker pull $CONTAINER_IM_IMAGE:latest || true - docker build --cache-from $CONTAINER_IM_IMAGE:latest --target riscv-builder -t $CONTAINER_IM_IMAGE:$CI_COMMIT_REF_NAME -t $CONTAINER_IM_IMAGE:latest . - docker build --cache-from $CONTAINER_IM_IMAGE:$CI_COMMIT_REF_NAME --cache-from $CONTAINER_IMAGE:latest -t $CONTAINER_IMAGE:$CI_COMMIT_REF_NAME -t $CONTAINER_IMAGE:latest . - docker push $CONTAINER_IM_IMAGE:$CI_COMMIT_REF_NAME - docker push $CONTAINER_IM_IMAGE:latest - docker push $CONTAINER_IMAGE:$CI_COMMIT_REF_NAME - docker push $CONTAINER_IMAGE:latest rules: - if: '$CI_COMMIT_BRANCH == "master"' - if: $CI_COMMIT_BRANCH changes: - Dockerfile* - .gitlab/ci/build.gitlab-ci.yml
# Copyright 2020-2022 F4PGA Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0

os: linux
language: python
python: 3.6

addons:
  apt:
    sources:
      - sourceline: "ppa:ubuntu-toolchain-r/test"
    packages:
      - clang-format-5.0
      - g++-6
      - xsltproc

install:
  - export CC=gcc-6
  - export CXX=g++-6
  - make env

jobs:
  include:
    # Stage 1 - Tests
    - stage: "Tests"
      name: "C++ Tests"
      script: make test-cpp
    - name: "Python Tests"
      script: make test-py
    - name: "Format"
      script:
        - make format
        - test $(git status --porcelain | wc -l) -eq 0 || { git diff; false; }
name: Regression

on: [push, pull_request]

jobs:
  build:
    strategy:
      matrix:
        #TODO: Re-introduce Windows
        #os: [ubuntu-latest, macOS-latest, windows-latest]
        os: [ubuntu-latest, macOS-latest]
        rust: [stable]
    runs-on: ${{ matrix.os }}
    steps:
      - name: Setup Rust
        uses: hecrj/setup-rust-action@v1
        with:
          rust-version: ${{ matrix.rust }}
      - name: Checkout
        uses: actions/checkout@v3
      - name: Report Toolchain Versions
        run: |
          cargo -V
          rustc -V
          rustfmt -V
      - name: Check Formatting
        run: |
          cargo fmt --check
      - name: Build
        run: |
          cargo build
      - name: Test
        run: |
          cargo test --verbose
apb_cdc_4_phase: incdirs: [ ../../rtl/includes, ] files: [ RTL/apb_cdc.sv, RTL/apb_master_asynch.sv, RTL/apb_slave_asynch.sv, ]
apb_node:
  files: [
    src/apb_node.sv,
    src/apb_node_wrap.sv,
  ]

  jg_slint_top_name: [ apb_node_wrap ]
  jg_slint_elab_opt: [ ]
  jg_slint_postelab_cmds: [ ]
  jg_slint_clocks: [
    clk_i,
  ]
  jg_slint_resets: [
    ~rst_ni,
  ]
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  creationTimestamp: null
  name: grafana-admin
rules:
- apiGroups:
  - '*'
  resources:
  - secrets
  - services
  verbs:
  - create
  - delete
  - get
  - list
  - patch
  - update
  - watch
- apiGroups:
  - grafana.f110.dev
  resources:
  - grafanas
  verbs:
  - create
  - delete
  - get
  - list
  - patch
  - update
  - watch
- apiGroups:
  - grafana.f110.dev
  resources:
  - grafanas/status
  verbs:
  - get
  - patch
  - update
- apiGroups:
  - grafana.f110.dev
  resources:
  - grafanausers
  verbs:
  - create
  - delete
  - get
  - list
  - patch
  - update
  - watch
- apiGroups:
  - grafana.f110.dev
  resources:
  - grafanausers/status
  verbs:
  - get
  - patch
  - update
#
# List of IPs and relative branch/commit-hash/tag.
# Uses the YAML syntax.
#
# Examples:
#
# or10n:
#   commit: tags/PULP3_final
#   domain: [cluster]
# udma:
#   commit: 62b10440
#   domain: [soc]
# axi_slice:
#   commit: tags/pulpissimo-v1.0
#   domain: [soc,cluster]
#
# If a *tag* or *commit* is referenced, the IP will be in a
# state of DETACHED HEAD. Before committing any additional
# work, make sure to checkout a branch.
#

L2_tcdm_hybrid_interco:
  commit: tags/pulpissimo-v1.0
adv_dbg_if:
  commit: <PASSWORD>
apb/apb2per:
  commit: tags/pulpissimo-v1.0
apb/apb_adv_timer:
  commit: tags/pulpissimo-v1.0
apb/apb_fll_if:
  commit: tags/pulpissimo-v1.0
apb/apb_gpio:
  commit: 730a9204dbb0d7057f45ef833d0a6e868c46107b
apb/apb_node:
  commit: tags/pulpissimo-v1.0
apb_interrupt_cntrl:
  commit: tags/pulpissimo-v1.0
axi/axi_node:
  commit: tags/pulpissimo-v1.0
axi/axi_slice:
  commit: tags/pulpissimo-v1.0
axi/axi_slice_dc:
  commit: 5f889f887e58f6d5dadd79616b16e1a63381d569
axi/axi_mem_if:
  commit: 313d075cac65e960fddc8b93848aceda18eebeac
timer_unit:
  commit: tags/pulpissimo-v1.0
common_cells:
  commit: 0b8c10c21c9f810509bbd7bf86cfbdc4e6626c8e
fpnew:
  commit: v0.5.3
jtag_pulp:
  commit: dbg_dev
riscv:
  commit: pulpissimo-v3.3.1
ibex:
  commit: 13313952cd50ff04489f6cf3dba9ba05c2011a8b
  group: lowRISC
scm:
  commit: <PASSWORD>
generic_FLL:
  commit: tags/pulpissimo-v1.0
tech_cells_generic:
  commit: 0532e53c1a323db4a55381d123c55bdc4fab06f6
udma/udma_core:
  commit: vega_v1.0.6
udma/udma_uart:
  commit: vega_v1.0.1
udma/udma_i2c:
  commit: vega_v1.0.0
udma/udma_i2s:
  commit: 407e4c6fb89854aae67ac7f89276f77a775b0e95
udma/udma_qspi:
  commit: vega_v1.0.0
udma/udma_sdio:
  commit: vega_v1.0.5
udma/udma_camera:
  commit: vega_v1.0.2
udma/udma_filter:
  commit: vega_v1.0.3
udma/udma_external_per:
  commit: master
hwpe-mac-engine:
  commit: f1d0b72
riscv-dbg:
  commit: tags/v0.1
# Minimum versions of our tool dependencies
#
# This data is used in the build process and in the documentation
#
# Please enclose all version numbers in quotation marks to make sure they're
# parsed as string, not as float!

fusesoc: "1.9.0"
vivado: "2016.4"
verilator: "3.902"
name: Serial Tester
about: Tests the serial connection to an FPGA echo server. The program generates random data to send back and forth between the computer and the serial device.
args:
  - port:
      value_name: PORT
      help: The name of the port to use.
      required: true
      index: 1
  - baud:
      value_name: BAUD
      short: b
      long: baud
      help: Set the port baud rate.
      required: true
      takes_value: true
  - fail-only:
      short: f
      long: fail-only
      help: Only show the failures.
      multiple: true
      global: true
  - reps:
      value_name: REPS
      short: r
      long: reps
      required: true
      help: Set the number of repetitions to run for each test.
      takes_value: true
  - size:
      value_name: SIZE
      short: s
      long: size
      multiple: true
      required: true
      help: Add a test size to run. This can be used multiple times to add more than one test size.
      takes_value: true
package:
  name: utils
  authors:
    - "<NAME> <<EMAIL>>"

dependencies:
  cva6: { git: "<EMAIL>:minho-pulp/cva6.git", rev: "7de8e9429f0cf38b8668e7c993d4acad63bbc4b1" }
  axi: { git: "https://github.com/pulp-platform/axi.git", version: 0.29.1 }
  common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.21.0 }

sources:
  - target: not(synthesis)
    files:
      - include/instr_tracer_pkg.sv
      - ./instr_tracer_if.sv
      - ./instr_tracer.sv
      - ./axi2mem.sv
      - ./axi_master_connect.sv
      - ./axi_slave_connect.sv
      - ./axi_master_connect_rev.sv
      - ./axi_slave_connect_rev.sv
# Copyright (C) 2017-2021 The SymbiFlow Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC

language: python

matrix:
  include:
    - python: 3.5
      env: TOXENV=py35
    - python: 3.6
      env: TOXENV=py36
    - python: 3.7
      env: TOXENV=py37
    - python: 3.8
      env: TOXENV=py38

install: pip install yapf

script:
  - make format
  - git update-index --refresh
  - git diff-index --ignore-submodules --quiet HEAD

after_failure:
  - git diff

notifications:
  email: false
%YAML 1.2
---
Basis:
  escape_radius: 0.20             # When the vehicle sits inside a no-entry region, a circle of radius escape_radius centered on the vehicle is rewritten as a drivable region
  target_velocity_lower: 0.065    # Lower bound of the target velocity
  enable_object_detection: 1      # 0 or 1
  enable_traffic_light_detection: 0  # 0 or 1
  enable_pedestrian_detection: 0  # 0 or 1

TrafficLight:
  limit_time_of_red_light: 25.0   # Maximum time to stay stopped at a red light (seconds)
  limit_time_of_rediscovery: 0.0  # Time after resuming from a red-light stop until detection becomes effective again (seconds)
  detection_results_queue_depth: 5  # Depth of the ring buffer that stores traffic-light recognition results
  nof_detection_for_continuous: 1   # Number of detections regarded as "detected continuously" (see source code)
  distance_thr_for_continuous: 50.0 # Distance threshold (in pixels) between detection results regarded as "detected at the same position" (see source code)
  velocity_ratio_when_stop_line: 0.4  # When approaching a stop line, the target velocity is multiplied by this value

Obstacle:
  nof_opening: 1                  # Number of opening operations applied to the saturation binary image
  nof_closing: 5                  # Number of closing operations applied to the saturation binary image
  area_lower: 0.010
  area_upper: 0.050
  entarable_radius: 0.45          # The region beyond $ obstacle size + entarable_radius $ is treated as drivable
  valid_time_s: 50.0              # Time during which a detection remains valid as an obstacle
  occupation_radius: 0.02         # When a new obstacle is detected, it is not added to the list if another obstacle already exists within this distance
  velocity_ratio_when_detect_obs: 0.725  # When an obstacle's state is reflected in the constraints, the target velocity is multiplied by this value
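# Worked example (illustrative; units assumed to match target_velocity_lower
# above): with a target velocity of 0.10, velocity_ratio_when_stop_line = 0.4
# gives 0.10 * 0.4 = 0.04 near a stop line, and
# velocity_ratio_when_detect_obs = 0.725 gives 0.10 * 0.725 = 0.0725 while an
# obstacle constraint is active.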
sudo: false language: python python: - "2.7" cache: directories: - $HOME/.cache/pip - ${VIRTUAL_ENV}/lib/python${TRAVIS_PYTHON_VERSION}/site-packages - ${VIRTUAL_ENV}/bin install: - env - ls -al ${VIRTUAL_ENV}/lib/python${TRAVIS_PYTHON_VERSION}/site-packages - ls -al ${VIRTUAL_ENV}/bin - pwd - ls - pip install -r tests/requirements.txt - pip install -r docs/requirements.txt - ls -al ${VIRTUAL_ENV}/lib/python${TRAVIS_PYTHON_VERSION}/site-packages - ls -al ${VIRTUAL_ENV}/bin - git clone https://github.com/PandABlocks/PandABlocks-rootfs.git $HOME/PandABlocks/PandABlocks-rootfs - echo PANDA_ROOTFS = $HOME/PandABlocks/PandABlocks-rootfs > CONFIG - echo APP_NAME = >> CONFIG # command to run tests script: - make python_tests - make python_timing - make hdl_timing - make all_autogen - make docs
language: minimal
dist: focal

before_install:
  - sudo apt-get -y install perl python3 make g++ ccache autoconf flex bison libgoogle-perftools-dev numactl perl-doc libfl2 libfl-dev zlibc zlib1g zlib1g-dev

before_script:
  - git clone https://github.com/verilator/verilator.git
  - unset VERILATOR_ROOT
  - pushd verilator && git checkout v4.110 && autoconf && ./configure && make && sudo make install && popd

script:
  - verilator -version
  - make
  - cmp -s logs/output.txt test_vectors/keccak_ref_out.txt
config:
  debug_mode: true

hosts:
  main_machine:
    config:
      db_database: WebLabTests
    host: 127.0.0.1
    processes:
      main_instance:
        config_file: main_machine/main_instance/instance_config.py
        components:
          core:
            config_file: main_machine/main_instance/core/server_config.py
            type: core
          laboratory:
            config_file: main_machine/main_instance/laboratory/server_config.py
            type: laboratory
            protocols:
              port: 10129
              supports: xmlrpc
          experiment_javadummy:
            config_file: main_machine/main_instance/experiment_javadummy/server_config.py
            type: experiment
            class: experiments.dummy.DummyExperiment
          experiment_vm:
            type: experiment
            class: experiments.vm.server.VMExperiment
            protocols:
              port: 10040
              supports: xmlrpc
      dummy_instance:
        config_file: main_machine/dummy_instance/instance_config.py
        components:
          experiment_dummy:
            config_file: main_machine/dummy_instance/experiment_dummy/server_config.py
            type: experiment
            class: experiments.dummy.DummyExperiment
            protocols:
              port: 10039
              supports: xmlrpc
      pic_instance:
        config_file: main_machine/pic_instance/instance_config.py
        components:
          experiment_pic:
            type: experiment
            class: experiments.dummy.DummyExperiment
            protocols:
              port: 11040
              supports: xmlrpc
          experiment_pic2:
            type: experiment
            class: experiments.dummy.DummyExperiment
            protocols:
              port: 11039
              supports: xmlrpc
--- version: '3' services: hal-build: build: . volumes: - .:/home/src/:rw - ./deployment/build-output/:/home/build-output/:rw - ./deployment/build-output/.ccache/:/root/.ccache/:rw ...
name: Verible linter on: [push] jobs: linter: runs-on: ubuntu-latest steps: - uses: actions/checkout@master - uses: chipsalliance/verible-linter-action@main with: github_token: ${{ secrets.GITHUB_TOKEN }} config_file: 'config.rules' fail_on_error: true suggest_fixes: 'false' paths: | ./design/ extra_args: "--check_syntax=true --rules=-unpacked-dimensions-range-ordering" - name: Log output run: cat verible-verilog-lint.log - name: Upload artifact uses: actions/upload-artifact@main with: name: linter-log path: verible-verilog-lint.log
name: svlint

on: [push, pull_request]

jobs:
  svlint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Run svlint
        uses: dalance/svlint-action@v1
        with:
          filelists: |
            rtl/common/compile.f
            rtl/router/compile.f
            rtl/fabric/compile.f
            rtl/axi_adapter/compile.f
        env:
          TNOC_HOME: ${{ github.workspace }}
%YAML 1.2
---
param:
  dim: 2
  max_sampling_num: 1200
  goal_sampling_rate: 0.25
  expand_dist: 1.00
  R: 1.50
  goal_region_radius: 0.05
  terminate_ratio: 1.01  # Terminate once the cost drops to current_state.distanceFrom(target_state) * terminate_ratio or below
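# Worked example (illustrative): if current_state.distanceFrom(target_state)
# = 2.0, planning terminates once the best path cost drops to
# 2.0 * terminate_ratio = 2.0 * 1.01 = 2.02 or below.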
name: Containers on: push: paths: - '.github/*.dockerfile' - '.github/workflows/Containers.yml' schedule: - cron: '0 0 * * 5' workflow_dispatch: jobs: Container: runs-on: ubuntu-latest strategy: fail-fast: false matrix: image: - impl - sim name: '🛳️ ${{ matrix.image }}' steps: - name: '🧰 Repository Checkout' uses: actions/checkout@v2 - name: '⛴️ Build neorv32/${{ matrix.image }}' run: docker build -t ghcr.io/stnolting/neorv32/${{ matrix.image }} - < .github/${{ matrix.image }}.dockerfile - name: '🔑 Login to ghcr.io' if: github.event_name != 'pull_request' uses: docker/login-action@v1 with: registry: ghcr.io username: gha password: ${{ secrets.PACKAGE_TOKEN }} - name: '🛰️ Push image to ghcr.io' run: docker push ghcr.io/stnolting/neorv32/${{ matrix.image }}
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

description: VPG lane detection on Caltech dataset.
input size: 480*640
float ops: 2.5G
task: Lane Detection
framework: caffe
prune: '0.99'
version: 2.0
files:
  - name: cf_VPGnet_caltechlane_480_640_0.99_2.5G_2.0
    type: float & quantized
    board: GPU
    download link: https://www.xilinx.com/bin/public/openDownload?filename=cf_VPGnet_caltechlane_480_640_0.99_2.5G_2.0.zip
    checksum: 168eee070933f238ec80f0b344405b0f
  - name: vpgnet_pruned_0_99
    type: xmodel
    board: zcu102 & zcu104 & kv260
    download link: https://www.xilinx.com/bin/public/openDownload?filename=vpgnet_pruned_0_99-zcu102_zcu104_kv260-r2.0.0.tar.gz
    checksum: 9a5955ed30b429b860d53a1df8702bbe
  - name: vpgnet_pruned_0_99
    type: xmodel
    board: vck190
    download link: https://www.xilinx.com/bin/public/openDownload?filename=vpgnet_pruned_0_99-vck190-r2.0.0.tar.gz
    checksum: f040160619ca4407b641fae2cc16d5d1
  - name: vpgnet_pruned_0_99
    type: xmodel
    board: vck50006pe-DPUCVDX8H-DWC
    download link: https://www.xilinx.com/bin/public/openDownload?filename=vpgnet_pruned_0_99-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
    checksum: 3198fb4be02f23150c45d89a95a77d3b
  - name: vpgnet_pruned_0_99
    type: xmodel
    board: vck50008pe-DPUCVDX8H
    download link: https://www.xilinx.com/bin/public/openDownload?filename=vpgnet_pruned_0_99-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz
    checksum: 667f1a5ebb49c16dc7ced510c8b68618
  - name: vpgnet_pruned_0_99
    type: xmodel
    board: u50lv-DPUCAHX8H
    download link: https://www.xilinx.com/bin/public/openDownload?filename=vpgnet_pruned_0_99-u50lv-DPUCAHX8H-r2.0.0.tar.gz
    checksum: 4ec1a9a424077a417c7b892887dea3c2
  - name: vpgnet_pruned_0_99
    type: xmodel
    board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
    download link: https://www.xilinx.com/bin/public/openDownload?filename=vpgnet_pruned_0_99-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
    checksum: 5959b118ce5e8a5605ac05624654c58c
  - name: vpgnet_pruned_0_99
    type: xmodel
    board: u200-DPUCADF8H & u250-DPUCADF8H
    download link: https://www.xilinx.com/bin/public/openDownload?filename=vpgnet_pruned_0_99-u200-u250-r2.0.0.tar.gz
    checksum: 57037d37ecf7b5e35ad4c3a3ed859eb1
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
version: 2
jobs:
  build:
    docker:
      - image: buildpack-deps:focal
    steps:
      - checkout
      - run:
          name: Install python requirements
          command: apt-get update; apt-get install -y sudo; apt-get install -y python3-pip; pip install -r requirements.txt
      - run:
          name: Install Koheron python
          command: pip install python/.
      - run:
          name: Setup Base
          command: apt-get update; make setup_base
      - run:
          name: Install Eigen library
          command: sudo bash ./install_eigen.sh
      - run:
          name: Setup web
          command: curl -sL https://deb.nodesource.com/setup_10.x | sudo -E bash -; sudo make setup_web
      - run:
          name: Build server
          command: sudo bash build_examples.sh server
      - run:
          name: Build web
          command: sudo bash build_examples.sh web
  deploy:
    docker:
      - image: circleci/python:2.7-stretch-browsers
    steps:
      - checkout
      - run:
          name: Install python requirements
          command: sudo pip install -r requirements.txt
      - run:
          name: install twine
          command: sudo pip install twine
      - run:
          name: Upload Koheron python to pypi
          command: sudo make PYPI_USERNAME=$PYPI_USERNAME PYPI_PASSWORD=$PYPI_PASSWORD upload_pip

workflows:
  version: 2
  build-deploy:
    jobs:
      - build
      - deploy:
          filters:
            branches:
              only: master
# File auto-generated by Padrick 0.1.0.post0.dev49+g9979c54.dirty package: name: alsaqr_periph_padframe authors: - "Padrick" dependencies: register_interface: { git: "https://github.com/pulp-platform/register_interface.git", version: 0.3.1 } common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.21.0 } export_include_dirs: - include sources: - src/pkg_alsaqr_periph_padframe.sv - src/pkg_internal_alsaqr_periph_padframe_periphs.sv - src/alsaqr_periph_padframe_periphs_config_reg_pkg.sv - src/alsaqr_periph_padframe_periphs_config_reg_top.sv - src/alsaqr_periph_padframe_periphs_pads.sv - src/alsaqr_periph_padframe_periphs_muxer.sv - src/alsaqr_periph_padframe_periphs.sv - src/alsaqr_periph_padframe.sv
# # List of IPs and relative branch/commit-hash/tag. # Uses the YAML syntax. # axi/axi2mem: commit: 6973e0434d26ba578cdb4aa69c26c1facd1a3f15 domain: [soc, cluster] group: pulp-platform axi/axi2per: commit: tags/v1.0.1 domain: [cluster] group: pulp-platform axi/per2axi: commit: v1.0.4 domain: [soc, cluster] group: pulp-platform axi/axi_size_conv: commit: 5239f87fe772111ec368fb08dbb971516edce097 domain: [cluster] group: pulp-platform cluster_interconnect: commit: tags/v1.1.0 domain: [cluster] group: pulp-platform event_unit_flex: commit: 1.4.1 domain: [cluster] group: pulp-platform mchan: commit: tags/v1.2.2 domain: [cluster] group: pulp-platform hier-icache: commit: v1.2.0 domain: [cluster] group: pulp-platform icache-intc: commit: tags/v1.0.1 domain: [cluster] group: pulp-platform icache_mp_128_pf: commit: 6f2e54102001230db9c82432bf9e011842419a48 domain: [cluster] group: pulp-platform icache_private: commit: 1d4cdbcbec3ab454c09d378fc55631b60450fccd domain: [cluster] group: pulp-platform cluster_peripherals: commit: v2.0.0 domain: [cluster] group: pulp-platform fpu_interco: commit: <PASSWORD> domain: [soc, cluster] group: pulp-platform
language: cpp
dist: bionic
os: linux
compiler: gcc

addons:
  apt:
    sources:
      - ubuntu-toolchain-r-test
    packages:
      - build-essential
      - valgrind
      - verilator
      - yosys

install:
  # Set environments
  - export RISCV_TOOLCHAIN_PATH=/opt/riscv-gnu-toolchain
  - export VERILATOR_ROOT=/opt/verilator
  - export PATH=$VERILATOR_ROOT/bin:$PATH
  # Install toolchain
  - ci/toolchain_install.sh -all
  # clone build directory
  - make -s

# stages ordering
stages:
  - test

jobs:
  include:
    - stage: test
      name: coverage
      script: cp -r $PWD ../build1 && cd ../build1 && ./ci/regression.sh -coverage
    - stage: test
      name: cluster
      script: cp -r $PWD ../build2 && cd ../build2 && ./ci/regression.sh -cluster
    - stage: test
      name: debug
      script: cp -r $PWD ../build3 && cd ../build3 && ./ci/regression.sh -debug
    - stage: test
      name: config
      script: cp -r $PWD ../build4 && cd ../build4 && ./ci/regression.sh -config
    - stage: test
      name: stress
      script: cp -r $PWD ../build5 && cd ../build5 && ./ci/regression.sh -stress
    - stage: test
      name: compiler
      script: cp -r $PWD ../build6 && cd ../build6 && ./ci/test_compiler.sh

after_success:
  # Gather code coverage
  - lcov --directory driver --capture --output-file driver.cov # capture trace
  - lcov --directory simx --capture --output-file simx.cov # capture trace
  - lcov --list driver.cov # output coverage data for debugging
  - lcov --list simx.cov # output coverage data for debugging
  # Upload coverage report
  - bash <(curl -s https://codecov.io/bash) -f driver.cov
  - bash <(curl -s https://codecov.io/bash) -f simx.cov
# Run postroute_hold standalone, with two retry attempts in case of failure.

agents: { queue: "papers" }

env:
  GOLD : /build/prh${BUILDKITE_BUILD_NUMBER}/full_chip
  SETUP_BK : source mflowgen/bin/setup-buildkite.sh --dir $GOLD --need_space
  PHREF : /sim/buildkite-agent/gold
  SETUP_PRH : eval $$GARNET_HOME/mflowgen/bin/get-step-context.sh $PHREF
  RUN_PRH : eval $$GARNET_HOME/.buildkite/bin/prh.sh |& tee

  # Insert "FAIL" labels into mflowgen pipeline
  FAIL1 : 'echo steps : [ { label : FAIL->retry1 , command : exit } ]'
  FAIL2 : 'echo steps : [ { label : FAIL->retry2 , command : exit } ]'
  UPLOAD : 'buildkite-agent pipeline upload'

  # Set slack to -0.3 to make postroute_hold much faster.
  # Default targ slack for full_chip @ 0.06 takes 6 hours atm.
  # With hack target -0.3, should be about 2.5 hours (saves 3.5 hours)
  MFLOWGEN_PARM_OVERRIDE_hold_target_slack : -0.3

steps:
  - label: 'setup'
    commands:
      - '$$SETUP_BK 1G; mflowgen run --design $$GARNET_HOME/mflowgen/full_chip; grep slack .mflowgen/*postroute_hold/mflowgen-run; grep slack .mflowgen/*postroute_hold/configure.yml'
  - wait: ~

  ########################################################################
  # postroute_hold fails sometimes, thus all this infrastructure for retry.
  #
  # "prh.sh" does the following:
  #   - if "*-postroute_hold" already done and passed, do nothing and exit 0
  #   - else if (presumably failed) dir "*-postroute_hold" exists, rename it
  #   - build new step postroute_hold
  #
  ########################################################################

  # postroute_hold, retry if fail.
  - label: "hold"
    commands:
      - echo "--- POSTROUTE_HOLD - FIRST ATTEMPT"
      - 'set -o pipefail; $$SETUP_BK 1G; $$SETUP_PRH; $$RUN_PRH make-prh0.log || $$FAIL1 | $$UPLOAD'
  - wait: { continue_on_failure: true }

  # First retry, continue with another retry on failure.
  - label: "hold'"
    commands:
      - echo "--- POSTROUTE_HOLD - SECOND ATTEMPT"
      - 'set -o pipefail; $$SETUP_BK 1G; $$SETUP_PRH; $$RUN_PRH make-prh1.log || $$FAIL2 | $$UPLOAD'
  - wait: { continue_on_failure: true }

  # Final postroute_hold attempt, fail pipeline if this one bombs.
  - label: "hold''"
    commands:
      - echo "--- POSTROUTE_HOLD - FINAL ATTEMPT"
      - 'set -o pipefail; $$SETUP_BK 1G; $$SETUP_PRH; $$RUN_PRH make-prh2.log || exit 13'
  - wait: ~

########################################################################
# TRASH/OLD
# Uncomment for quick test
#   SETUP_PRH : echo fooooo
#   RUN_PRH : grep foo /dev/null |& tee
name: Trigger update opae-sdk

on:
  push:
    paths-ignore:
      - '.github/**'
    branches:
      - master
  workflow_dispatch:

jobs:
  trigger_dispatch:
    runs-on: ubuntu-latest
    steps:
      - name: Trigger dispatch event (update-opae-libs)
        run: |
          curl -v --request POST --data '{"event_type": "update-opae-libs"}' \
          -H "Accept: application/vnd.github.everest-preview+json" \
          -H "Authorization: token ${{ secrets.UPDATER_TOKEN }}" \
          https://api.github.com/repos/OPAE/opae-sdk/dispatches
# Check NEORV32 software framework and test processor using .ci/ scripts

name: Processor

on:
  push:
    branches:
      - master
    paths:
      - 'rtl/**'
      - 'sw/**'
      - 'sim/**'
  pull_request:
    branches:
      - master
    paths:
      - 'rtl/**'
      - 'sw/**'
      - 'sim/**'
  workflow_dispatch:

jobs:
  Processor:
    runs-on: ubuntu-latest

    steps:
      - name: '🧰 Repository Checkout'
        uses: actions/checkout@v2

      - name: '🔧 Setup Environment Variables'
        run: |
          echo "$GITHUB_WORKSPACE/riscv/bin" >> $GITHUB_PATH
          echo $GITHUB_WORKSPACE

      - name: '🔧 Setup RISC-V GCC'
        run: |
          mkdir riscv
          curl -fsSL https://github.com/stnolting/riscv-gcc-prebuilt/releases/download/rv32i-2.0.0/riscv32-unknown-elf.gcc-10.2.0.rv32i.ilp32.newlib.tar.gz | \
          tar -xzf - -C riscv
          ls -al riscv

      - name: '🔧 Setup GHDL Simulator'
        uses: ghdl/setup-ghdl-ci@nightly
        with:
          backend: llvm

      - name: '⚙️ Run Software Framework Tests'
        run: ./sw/example/processor_check/check.sh

      - name: '⚙️ Run Processor Hardware Tests'
        run: ./sim/ghdl/ghdl_sim.sh
cryoAsicGen1: enable: True EpixHRGen1Cryo: enable: True MMCMSerdesRegisters: enable: True CLKOUT0PhaseMux: 0 CLKOUT0HighTime: 4 CLKOUT0LowTime: 4 CLKOUT0Frac: 0 CLKOUT0FracEn: 0 CLKOUT0Edge: 0 CLKOUT0NoCount: 0 CLKOUT0DelayTime: 0 CLKOUT1PhaseMux: 0 CLKOUT1HighTime: 16 CLKOUT1LowTime: 16 CLKOUT1Edge: 0 CLKOUT1NoCount: 0 CLKOUT1DelayTime: 0 CLKOUT2PhaseMux: 0 CLKOUT2HighTime: 28 CLKOUT2LowTime: 28 CLKOUT2Edge: 0 CLKOUT2NoCount: 0 CLKOUT2DelayTime: 0 CLKOUT3PhaseMux: 0 CLKOUT3HighTime: 4 CLKOUT3LowTime: 4 CLKOUT3Edge: 0 CLKOUT3NoCount: 0 CLKOUT3DelayTime: 0
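# Illustrative note (assumes standard Xilinx 7-series MMCM counter semantics
# as described in XAPP888; not part of the original file): with NoCount = 0,
# each output divider is approximately HighTime + LowTime, so the values
# above correspond to divide ratios of roughly 8 (CLKOUT0), 32 (CLKOUT1),
# 56 (CLKOUT2), and 8 (CLKOUT3).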
%YAML 1.2
---
Basis:
  R: 1.5  # Update the target state once the distance between the current state and the target state falls below this value
  ratio_for_path_planner_region: 1.6  # A square region centered on the midpoint between the current state and the target state is handed to the PathPlanner; this parameter sets its size.
                                      # The side length is ||current_state - med_state||_2 * 2 * ratio_for_path_planner_region
  nof_checking_index_in_gen_constraint: 25  # When connecting paths, check the constraint map back to the index this many steps earlier
  search_tl_dist_thr: 1.80  # Distance threshold used when collecting traffic lights observable from the current state
  search_tl_deg_thr: 20  # Threshold on the absolute angular difference (degrees) used when collecting traffic lights observable from the current state
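# Worked example (illustrative): if ||current_state - med_state||_2 = 1.0,
# the square region handed to the PathPlanner has side length
# 1.0 * 2 * ratio_for_path_planner_region = 1.0 * 2 * 1.6 = 3.2.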
package: name: register_interface authors: ["<NAME> <<EMAIL>>", "<NAME> <<EMAIL>>"] dependencies: axi: { git: "https://github.com/pulp-platform/axi.git", version: 0.27.0 } common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.21.0 } export_include_dirs: - include sources: # Level 0 - src/reg_intf.sv - vendor/lowrisc_opentitan/src/prim_subreg_arb.sv - vendor/lowrisc_opentitan/src/prim_subreg_ext.sv # Level 1 - src/apb_to_reg.sv - src/axi_to_reg.sv - src/periph_to_reg.sv - src/reg_cdc.sv - src/reg_demux.sv - src/reg_mux.sv - src/reg_to_mem.sv - src/reg_uniform.sv - vendor/lowrisc_opentitan/src/prim_subreg_shadow.sv - vendor/lowrisc_opentitan/src/prim_subreg.sv # Level 2 - src/axi_lite_to_reg.sv - target: test files: - src/reg_test.sv
steps:
  - label: ":wrench: Build and Test Garnet GLB + Fabric"
    commands:
      - source /aha/bin/activate
      - source /cad/modules/tcl/init/sh
      - module load base xcelium/19.03.003 vcs
      - echo "--- Pruning Docker Images"
      - yes | docker image prune -a --filter "until=24h" --filter=label='description=garnet' || true
      # use the mounted garnet
      - rm -rf /aha/garnet
      - cp -r /workdir /aha/garnet
      - echo "--- Install requirements"
      - pip install -r /aha/garnet/requirements.txt
      - echo "--- Generating Garnet"
      - aha garnet -v --width 8 --height 4
      - echo "--- Generating Apps"
      - aha halide tests/conv_1_2
      - aha map tests/conv_1_2 --width 4 --height 4 --no-pd
      - aha halide tests/conv_2_1
      - aha map tests/conv_2_1 --width 4 --height 4 --no-pd
      - echo "--- Testing Single App"
      - aha glb tests/conv_1_2
      - aha glb tests/conv_2_1
      - echo "--- Testing Two Apps"
      - aha glb tests/conv_1_2 tests/conv_2_1
    plugins:
      - docker#v3.2.0:
          image: stanfordaha/garnet
          volumes:
            - "/cad/:/cad"
          shell: ["/bin/bash", "-e", "-c"]
    agents:
      docker: true
name: CI

# Controls when the workflow will run
on:
  # Triggers the workflow on push or pull request events for any branch
  push:
    branches:
      - '**'
  pull_request:
    branches:
      - '**'

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains a single job called "build"
  build:
    # The type of runner that the job will run on
    runs-on: ubuntu-latest

    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      - uses: actions/checkout@v2
      - uses: mishas/setup-bazelisk-action@v1
      - name: Mount bazel cache  # Optional
        uses: actions/cache@v1
        with:
          path: "~/.cache/bazel"
          key: bazel
      - name: Install gnat
        run: sudo apt-get install -y gnat
      - name: Install llvm
        run: sudo apt-get install -y llvm
      - run: >
          bazelisk build //:ghdl_example_tb;
          bazelisk test //:test_ghdl_example
- Global:
    Print : true
- Library:
    Name     : merge_sorter
    Format   : "add_vhdl_file sources_1 #{library_name} #{file_name}"
    PathList : ["../../ip/argsort_axi_1.1/src/MERGE_SORTER/"]
- Library:
    Name     : pipework
    Format   : "add_vhdl_file sources_1 #{library_name} #{file_name}"
    PathList : ["../../ip/argsort_axi_1.1/src/PIPEWORK/"]
- Library:
    Name     : dummy_plug
    Format   : "add_vhdl_file sim_1 #{library_name} #{file_name}"
    Exclude  : ["../../Merge_Sorter/Dummy_Plug/src/main/vhdl/core/sync_alt.vhd"]
    PathList : ["../../Merge_Sorter/Dummy_Plug/src/main/vhdl/"]
- Library:
    Name     : work
    Format   : "add_vhdl_file sim_1 #{library_name} #{file_name}"
    PathList : ["../../Merge_Sorter/src/test/vhdl"]
    Top      : ["ArgSort_AXI_Test_Bench"]
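# Illustrative note (assumed template expansion; the exact tool behavior is
# not shown in this file): each Format string is presumably instantiated once
# per VHDL file discovered under PathList, producing Tcl lines such as
#   add_vhdl_file sources_1 merge_sorter ../../ip/argsort_axi_1.1/src/MERGE_SORTER/<file>.vhd
# with #{library_name} and #{file_name} substituted accordingly.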