<reponame>DanielTRYTRYLOOK/RDF-2020
name: i2c_master_top
clock_port: wb_clk_i
verilog:
  - i2c_master_defines.v
  - i2c_master_bit_ctrl.v
  - i2c_master_byte_ctrl.v
  - i2c_master_top.v
src_list:
  rtl:
    - i2c_master_ctrl.vhd
    - i2c_master_data.vhd
    - i2c_master_top.vhd
  tb:
    - tb_pkg.vhd
    - tb.vhd
  submodules: null
sim:
  top_name: tb
  pat_in: pat_in.txt
  pat_out: pat_out.txt
  dut_out: dut_out.txt
  pat_comp_script: null
  fixed_cases:
    - '00'
  pat_gen_script: null
  generated_cases: null
ePixHr10kT:
  enable: True
  ForceWrite: False
  InitAfterConfig: False
  EpixHR:
    enable: True
    PacketRegisters0:
      enable: True
      ResetCounters: False
      asicDataReq: 0x123f
      DisableLane: 0x0
      EnumerateDisLane: 0x0
    PacketRegisters1:
      enable: True
      ResetCounters: False
      asicDataReq: 0x123f
      DisableLane: 0x0
      EnumerateDisLane: 0x0
    PacketRegisters2:
      enable: True
      ResetCounters: False
      asicDataReq: 0x123f
      DisableLane: 0x0
      EnumerateDisLane: 0x0
    PacketRegisters3:
      enable: True
      ResetCounters: False
      asicDataReq: 0x123f
      DisableLane: 0x0
      EnumerateDisLane: 0x0
<filename>azure-pipelines.yml
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

# Azure Pipelines CI build configuration
# Documentation at https://aka.ms/yaml

variables:
  - template: ci/vars.yml

trigger:
  batch: true
  branches:
    include:
      - '*'
  tags:
    include:
      - '*'

pr:
  branches:
    include:
      - '*'

# Note: All tests run as part of one job to avoid copying intermediate build
# artifacts around (e.g. Verilator and toolchain builds). Once more builds/tests
# are added, we need to re-evaluate this decision to parallelize jobs and
# improve end-to-end CI times.

jobs:
  - job: lint_dv
    displayName: Run quality checks (Lint and DV)
    pool:
      vmImage: "ubuntu-18.04"
    steps:
      - bash: |
          ci/install-build-deps.sh
        displayName: Install build dependencies

      - bash: |
          echo $PATH
          python3 --version
          echo -n "fusesoc "
          fusesoc --version
          verilator --version
          riscv32-unknown-elf-gcc --version
          verible-verilog-lint --version
        displayName: Display environment

      # Verible format is experimental so only run on default config for now,
      # will eventually become part of the per-config CI
      - bash: |
          fusesoc --cores-root . run --no-export --target=format --tool=veribleformat lowrisc:ibex:ibex_top_tracing
          if [ $? != 0 ]; then
            echo -n "##vso[task.logissue type=error]"
            echo "Verilog format with Verible failed. Run 'fusesoc --cores-root . run --no-export --target=format --tool=veribleformat lowrisc:ibex:ibex_top_tracing' to check and fix all errors."
            echo "This flow is currently experimental and failures can be ignored."
          fi
          # Show diff of what verilog_format would have changed, and then revert.
          git diff
          git reset --hard HEAD
        continueOnError: true
        displayName: Format all source code with Verible format (experimental)

      - bash: |
          fork_origin=$(git merge-base --fork-point origin/master)
          changed_files=$(git diff --name-only $fork_origin | grep -v '^vendor' | grep -E '\.(cpp|cc|c|h)$')
          test -z "$changed_files" || git diff -U0 $fork_origin $changed_files | clang-format-diff -p1 | tee clang-format-output
          if [ -s clang-format-output ]; then
            echo -n "##vso[task.logissue type=error]"
            echo "C/C++ lint failed. Use 'git clang-format' with appropriate options to reformat the changed code."
            exit 1
          fi
        # This check is not idempotent, but checks changes to a base branch.
        # Run it only on pull requests.
        condition: eq(variables['Build.Reason'], 'PullRequest')
        displayName: 'Use clang-format to check C/C++ coding style'

      - bash: |
          # Build and run CSR testbench, chosen Ibex configuration does not affect
          # this so doesn't need to be part of per-config CI
          fusesoc --cores-root=. run --target=sim --tool=verilator lowrisc:ibex:tb_cs_registers
        displayName: Build and run CSR testbench with Verilator

      - bash: |
          cd build
          git clone https://github.com/riscv/riscv-compliance.git
          cd riscv-compliance
          git checkout "$RISCV_COMPLIANCE_GIT_VERSION"
        displayName: Get RISC-V Compliance test suite

      - bash: |
          # Build CoreMark without performance counter dump for co-simulation testing
          make -C ./examples/sw/benchmarks/coremark SUPPRESS_PCOUNT_DUMP=1
        displayName: Build CoreMark

      # Run Ibex RTL CI per supported configuration
      - template: ci/ibex-rtl-ci-steps.yml
        parameters:
          ibex_configs:
            # Note: Try to keep the list of configurations in sync with the one used
            # in Private CI.
            - small
            - experimental-maxperf-pmp
            - experimental-maxperf-pmp-bmfull
            - opentitan
            - experimental-branch-predictor

      # Run lint on simple system
      - bash: |
          fusesoc --cores-root . run --target=lint --tool=verilator lowrisc:ibex:ibex_simple_system
          if [ $? != 0 ]; then
            echo -n "##vso[task.logissue type=error]"
            echo "Verilog lint with Verilator failed. Run 'fusesoc --cores-root . run --target=lint --tool=verilator lowrisc:ibex:ibex_simple_system' to check and fix all errors."
            exit 1
          fi
        displayName: Run Verilator lint on simple system

      - bash: |
          fusesoc --cores-root . run --target=lint --tool=veriblelint lowrisc:ibex:ibex_simple_system
          if [ $? != 0 ]; then
            echo -n "##vso[task.logissue type=error]"
            echo "Verilog lint with Verible failed. Run 'fusesoc --cores-root . run --target=lint --tool=veriblelint lowrisc:ibex:ibex_simple_system' to check and fix all errors."
            exit 1
          fi
        displayName: Run Verible lint on simple system
<reponame>ka7/bat
---
# Simple example
id: 3595
name: <NAME>
username: "testuser"
other_names: ['Bob', 'Bill', 'George']
hexa: 0x11c3 #inline comment
octa: 021131
lastseen: .NAN
enabled: true
locked: false
groups:
  - administrators
  - engineering
  - sfa
address: >
  123 Alphabet Way
  San Francisco, CA
bio: |
  I am a hardworking person and a member of the executive staff
phone: null
email: ~
building_access: yes
secure_access: no
bulb: On
fans: Off
emails:
  executives:
    - <EMAIL>
    - <EMAIL>
  supervisors:
    - <EMAIL>
<filename>.github/workflows/docs.yml
name: docs
on:
  push:
    branches:
      - master
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Build sphinx doc
        uses: ammaraskar/sphinx-action@master
        with:
          docs-folder: "docs/"
          pre-build-command: "pip install sphinxcontrib.napoleon"
          build-command: "make html"
      - name: Deploy to gh-pages
        uses: peaceiris/actions-gh-pages@v3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          personal_token: ${{ secrets.DEPLOY_TOKEN }}
          publish_dir: ./docs/html
          publish_branch: gh-pages
          allow_empty_commit: false
          keep_files: false
          user_name: 'github-actions[bot]'
          user_email: '<EMAIL>[bot]<EMAIL>'
          commit_message: ${{ github.event.head_commit.message }}
package:
  name: ariane
  authors: [ "<NAME> <<EMAIL>>" ]

dependencies:
  axi: { git: "<EMAIL>:sasa/axi.git", rev: master }
  axi2per: { git: "<EMAIL>:pulp-open/axi2per.git", rev: master }
  axi_mem_if: { git: "<EMAIL>:pulp-platform/axi_mem_if.git", rev: master }
  axi_node: { git: "<EMAIL>:pulp-open/axi_node.git", version: v1.1.0 }
  axi_slice: { git: "<EMAIL>:sasa/axi_slice.git", version: 1.1.2 }
  tech_cells_generic: { git: "<EMAIL>:pulp-open/tech_cells_generic.git", rev: master }
  common_cells: { git: "<EMAIL>:sasa/common_cells.git", version: v1.7.4 }
  fpga-support: { git: "https://github.com/pulp-platform/fpga-support.git", version: v0.3.2 }

sources:
  - include/riscv_pkg.sv
  - src/debug/dm_pkg.sv
  - include/ariane_pkg.sv
  - include/std_cache_pkg.sv
  - target: not(synthesis)
    files:
      - src/util/instruction_tracer_pkg.sv
      - src/util/instruction_tracer_if.sv
  - src/alu.sv
  - src/ariane.sv
  - src/branch_unit.sv
  - src/cache_ctrl.sv
  - src/commit_stage.sv
  - src/compressed_decoder.sv
  - src/controller.sv
  - src/csr_buffer.sv
  - src/csr_regfile.sv
  - src/decoder.sv
  - src/ex_stage.sv
  - src/frontend/btb.sv
  - src/frontend/bht.sv
  - src/frontend/ras.sv
  - src/frontend/instr_scan.sv
  - src/frontend/frontend.sv
  - src/icache.sv
  - src/id_stage.sv
  - src/instr_realigner.sv
  - src/issue_read_operands.sv
  - src/issue_stage.sv
  - src/lfsr.sv
  - src/load_unit.sv
  - src/lsu_arbiter.sv
  - src/lsu.sv
  - src/miss_handler.sv
  - src/mmu.sv
  - src/mult.sv
  - src/nbdcache.sv
  - src/vdregs.sv
  - src/perf_counters.sv
  - src/ptw.sv
  - src/std_cache_subsystem.sv
  - src/sram_wrapper.sv
  # - src/ariane_regfile_ff.sv
  - src/ariane_regfile.sv
  - src/re_name.sv
  - src/scoreboard.sv
  - src/store_buffer.sv
  - src/store_unit.sv
  - src/tlb.sv
  - src/commit_stage.sv
  - src/axi_adapter.sv
  - src/cache_subsystem/cache_ctrl.sv
  - src/cache_subsystem/miss_handler.sv
  - src/cache_subsystem/std_cache_subsystem.sv
  - src/cache_subsystem/std_icache.sv
  - src/cache_subsystem/std_nbdcache.sv
  - src/debug/debug_rom/debug_rom.sv
  - src/debug/dm_csrs.sv
  - src/clint/clint.sv
  - src/clint/axi_lite_interface.sv
  - src/debug/dm_mem.sv
  - src/debug/dm_top.sv
  - src/debug/dmi_cdc.sv
  - src/debug/dmi_jtag.sv
  - src/debug/dmi_jtag_tap.sv
<filename>.travis.yml
language: bash

install:
  - source ./.travis/setup.sh

script:
  - git push <EMAIL>:build "$BRANCH_NAME"

notifications:
  email:
    - <EMAIL>
  irc:
    channels:
      - "chat.freenode.net#hdmi2usb"
      - "chat.freenode.net#timvideos"
    template:
      - "[%{repository_slug}/%{branch}#%{build_number}] (%{commit}): %{message} (%{build_url})"
package:
  name: axi_mem_if
  authors: [ "<NAME> <<EMAIL>>" ]

dependencies:
  axi: { git: "<EMAIL>:sasa/axi.git", rev: master }

sources:
  - src/axi2mem.sv
  - src/deprecated/axi_mem_if.sv
  - src/deprecated/axi_mem_if_var_latency.sv
  - src/deprecated/axi_mem_if_wrap.sv
<filename>src_files.yml
apb_spi_master:
  files: [
    apb_spi_master.sv,
    spi_master_apb_if.sv,
    spi_master_clkgen.sv,
    spi_master_controller.sv,
    spi_master_fifo.sv,
    spi_master_rx.sv,
    spi_master_tx.sv,
  ]
<filename>.github/workflows/riscv-arch-test.yml
# Run the RISC-V riscv-arch-test test framework port to check current NEORV32 version

name: 'riscv-arch-test'

on:
  workflow_dispatch:

jobs:
  build:
    runs-on: ubuntu-latest
    name: '🐧 Ubuntu-Latest'

    steps:
      - name: '🧰 Repository Checkout'
        uses: actions/checkout@v2

      - name: '🔧 Setup Environment Variables'
        run: |
          echo "$GITHUB_WORKSPACE/riscv/bin" >> $GITHUB_PATH
          echo $GITHUB_WORKSPACE

      - name: '🔧 Setup RISC-V GCC'
        run: |
          /bin/bash -c "chmod u+x ./.ci/install.sh && ./.ci/install.sh"
          echo $GITHUB_WORKSPACE

      - name: '🔧 Setup GHDL Simulator'
        uses: ghdl/setup-ghdl-ci@nightly
        with:
          backend: gcc

      - name: '⚙️ Run RISC-V Architecture Tests'
        run: /bin/bash -c "chmod u+x ./riscv-arch-test/run_riscv_arch_test.sh && ./riscv-arch-test/run_riscv_arch_test.sh"
PROJECT:
  dt: 0.1e-6
  board_name: PYNQ_Z1
  plugins: ['msdsl']
  emu_clk_freq: 10e6
  cpu_debug_mode: true
  cpu_debug_hierarchies: [[0, 'top']]
# This file defines single architecture set for tlut systolic array performance projection
- proj_16_16_bank4_block8
- proj_32_32_bank4_block8
- proj_64_64_bank4_block8
- proj_128_128_bank4_block8
runtime: nodejs12
service: log-me-in

env_variables:
  DB_USER: ctf
  DB_PASS: ""
  DB_NAME: ctf
  INSTANCE_CONNECTION_NAME: put your instance here
<reponame>DaveMcEwan/svdata
---
modules:
  - identifier: TestModule
    parameters: []
    ports:
      - identifier: a
        direction: Inout
        datakind: Net
        datatype: Logic
        classid: ~
        nettype: Wire
        signedness: Unsigned
      - identifier: b
        direction: Inout
        datakind: Net
        datatype: Logic
        classid: ~
        nettype: Wire
        signedness: Unsigned
      - identifier: c
        direction: Inout
        datakind: Net
        datatype: Logic
        classid: ~
        nettype: Wire
        signedness: Unsigned
      - identifier: d
        direction: Inout
        datakind: Net
        datatype: Int
        classid: ~
        nettype: Wire
        signedness: Unsigned
      - identifier: e
        direction: Inout
        datakind: Net
        datatype: Logic
        classid: ~
        nettype: Wire
        signedness: Unsigned
      - identifier: f
        direction: Inout
        datakind: Net
        datatype: Logic
        classid: ~
        nettype: Wire
        signedness: Unsigned
      - identifier: g
        direction: Inout
        datakind: Net
        datatype: String
        classid: ~
        nettype: Wire
        signedness: Unsigned
      - identifier: h
        direction: Inout
        datakind: Net
        datatype: Logic
        classid: ~
        nettype: Wire
        signedness: Unsigned
      - identifier: i
        direction: Inout
        datakind: Net
        datatype: Integer
        classid: ~
        nettype: Tri
        signedness: Unsigned
    filepath: testcases/sv/ansi_port_inout_net_unsigned.sv
packages: []
ip:
  name: "uvme_mio_cli_st"
  version: "1.0.0 Beta 0"
  full-name: "Moore.io CLI Testing Grounds UVM Environment"
  type: "DV"
  sub-type: "UVM Environment"
  sub-sub-type: "Agent Self-Testing"
  description: >
    UVM Environment used for development of Moore.io CLI.
    NOT meant for use in actual DV work.
  home-page: "https://datum-technology-corporation.github.io/mio_cli/"
  repo-uri: "https://github.com/Datum-Technology-Corporation/mio_cli.git"
  bugs: "https://github.com/Datum-Technology-Corporation/mio_cli/issues"
  aliases: []
  logo: ""
  block-diagram: "docs/env_block_diagram.svg"
  languages: ["sv"]
  simulators-supported:
    - { name: "vivado", version: "2021.1" }
  tags: []
  copyright-holders: ["Datum Technology Corporation"]
  licenses: ["SHL-2.1"]

structure:
  scripts-paths : [ "bin" ]
  docs-paths : [ "docs" ]
  examples-paths: [ "examples" ]
  src-paths : [ "src" ]

dependencies:
  - { type: "DV", name: "uvm" , repo-uri: "https://github.com/Datum-Technology-Corporation/uvm.git" , repo-branch: "main", repo-path: "dv/uvm" }
  - { type: "DV", name: "uvml" , repo-uri: "https://github.com/Datum-Technology-Corporation/uvml.git" , repo-branch: "main", repo-path: "dv/uvml" }
  - { type: "DV", name: "uvml_logs", repo-uri: "https://github.com/Datum-Technology-Corporation/uvml_logs.git", repo-branch: "main", repo-path: "dv/uvml_logs" }
  - { type: "DV", name: "uvml_sb" , repo-uri: "https://github.com/Datum-Technology-Corporation/uvml_sb.git" , repo-branch: "main", repo-path: "dv/uvml_sb" }

hdl-src:
  directories: [".", "comps", "obj", "seq"]
  flists:
    metrics : [ "src/uvme_mio_cli_st_pkg.flist" ]
    riviera : [ "src/uvme_mio_cli_st_pkg.flist" ]
    questa : [ "src/uvme_mio_cli_st_pkg.flist" ]
    vcs : [ "src/uvme_mio_cli_st_pkg.flist" ]
    vivado : [ "src/uvme_mio_cli_st_pkg.flist.xsim" ]
    xcelium : [ "src/uvme_mio_cli_st_pkg.flist" ]
<reponame>icgrp/doblink
"3200":
  arch: "artix7_200t"
  device_family: "xc7a200t"
  device_name: "fig2-3200"
  device_speed: "ffg1156-1"
  device: "xc7a200t-fig2-3200-roi-virt"
  board: "nexys_video"
  timeout: 400
<filename>hw/vendor/lowrisc_opentitan/uart/Bender.yml
package:
  name: lowrisc_uart
  description: "lowRISC UART"
  authors: ["lowRISC Contributors"]

dependencies:
  lowrisc_prim: {path: ../prim}

sources:
  - rtl/uart_reg_pkg.sv
  - rtl/uart_reg_top.sv
  - rtl/uart_rx.sv
  - rtl/uart_tx.sv
  - rtl/uart_core.sv
  - rtl/uart.sv
<filename>.github/workflows/test.yml name: Test on: push: branches: - develop pull_request: env: ARROW_VERSION: '3.0.0' jobs: cpp: name: 'C++' strategy: fail-fast: false matrix: source: - codegen/cpp/fletchgen - runtime/cpp - platforms/echo/runtime runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Install Apache Arrow run: | wget https://apache.bintray.com/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-archive-keyring-latest-$(lsb_release --codename --short).deb sudo apt-get install -y ./apache-arrow-archive-keyring-latest-$(lsb_release --codename --short).deb sudo apt-get update sudo apt-get install -y libarrow-dev=$ARROW_VERSION-1 - name: Configure run: | cmake ${{ matrix.source }} -DBUILD_TESTS=ON -DCMAKE_BUILD_TYPE=Debug - name: Build run: | make -j - name: Test run: | make test python: name: Python strategy: fail-fast: false matrix: source: - runtime/python - codegen/python include: - source: runtime/python package: pyfletcher - source: codegen/python package: pyfletchgen runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Install Apache Arrow run: | wget https://apache.bintray.com/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-archive-keyring-latest-$(lsb_release --codename --short).deb sudo apt-get install -y ./apache-arrow-archive-keyring-latest-$(lsb_release --codename --short).deb sudo apt-get update sudo apt-get install -y libarrow-dev=$ARROW_VERSION-1 libarrow-python-dev=$ARROW_VERSION-1 - uses: actions/setup-python@v2 with: python-version: '3.x' - name: Install pyarrow run: | python -m pip install --upgrade pip setuptools wheel python -m pip install pyarrow==$ARROW_VERSION - name: Build and install working-directory: ${{ matrix.source }} run: | python setup.py build python setup.py bdist_wheel python -m pip install build/dist/*.whl - name: Import run: python -c "import ${{ matrix.package }}" vhdl: name: VHDL runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 with: submodules: true - uses: ghdl/setup-ghdl-ci@master with: backend: llvm - uses: actions/setup-python@v2 with: python-version: '3.x' - name: Install dependencies run: | python -m pip install --upgrade pip setuptools wheel python -m pip install vhdeps - name: vhdeps run: vhdeps -i hardware ghdl -- --pattern '*_tc' --pattern ':!*/vhlib/*'
<gh_stars>100-1000
name: C/C++ CI

on:
  push:
    paths-ignore:
      - '.github/workflows/trigger-update.yml'
    branches: [master]
  pull_request:
    branches: [master]
    paths-ignore:
      - '.github/workflows/trigger-update.yml'

jobs:
  build:
    runs-on: ubuntu-20.04
    strategy:
      matrix:
        build-type: [Coverage, Debug, Release, RelWithDebInfo]
    steps:
      - uses: actions/checkout@v2
      - name: update
        run: sudo apt-get update -y
      - name: Get Packages
        uses: mstksg/get-package@v1
        with:
          apt-get: uuid-dev libjson-c-dev libhwloc-dev lcov linux-headers-generic libcap-dev libudev-dev
      - name: CMake Configure
        run: cmake -B .build -DCMAKE_BUILD_TYPE=${{ matrix.build-type }} -DOPAE_ENABLE_MOCK=ON -DOPAE_BUILD_TESTS=ON
      - name: make
        run: cmake --build .build -- -j $(nproc)
      - name: set hugepages
        run: sudo sysctl -w vm.nr_hugepages=8
      - name: Test in Debug mode
        if: ${{ matrix.build-type == 'Debug' }}
        working-directory: ${{ github.workspace }}/.build
        run: ctest --timeout 180 --rerun-failed --output-on-failure
        env:
          OPAE_EXPLICIT_INITIALIZE: 1
          LD_LIBRARY_PATH: ${{ github.workspace }}/.build/lib
      - name: Test with Coverage
        if: ${{ matrix.build-type == 'Coverage' }}
        working-directory: ${{ github.workspace }}/.build
        run: make lcov
        env:
          OPAE_EXPLICIT_INITIALIZE: 1
          LD_LIBRARY_PATH: ${{ github.workspace }}/.build/lib
      - name: Coveralls
        if: ${{ matrix.build-type == 'Coverage' }}
        uses: coverallsapp/github-action@master
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          path-to-lcov: .build/coverage.cleaned
name: CVA6 Task
description: Create a CVA6-SDK Project Task
title: "[TASK] <title>"
labels: ["task"]
body:
  - type: checkboxes
    attributes:
      label: Is there an existing CVA6-SDK task for this?
      description: Please search to see if a task issue already exists for the task you need to create
      options:
        - label: I have searched the existing task issues
          required: true
  - type: textarea
    attributes:
      label: Task Description
      description: A concise description of what needs to be done (user story)
    validations:
      required: true
  - type: textarea
    attributes:
      label: Description of Done
      description: What are the criteria for completion of this task?
    validations:
      required: true
  - type: markdown
    attributes:
      value: |
        **Keep task progress up to date by adding comments to the task as it progresses.**
AWSTemplateFormatVersion: '2010-09-09'
Parameters:
  KeyPairParameter: # Name of the key pair used to log in to the EC2 instances over SSH
    Type: String
  GitRepoURL:
    Type: String
    Default: https://github.com/aws/aws-fpga.git
Resources:
  F1DevVPC: # Create a VPC as a dedicated cloud environment for F1 instance work
    Type: AWS::EC2::VPC
    Properties:
      CidrBlock: 10.0.0.0/16
      Tags:
        - Key: Name
          Value: F1DevVPC
  InternetGateway: # Create a gateway so the VPC can communicate with the internet
    Type: AWS::EC2::InternetGateway
    Properties:
      Tags:
        - Key: Name
          Value: F1DevVPC-IGW
  AttachGateway: # Attach the InternetGateway to the VPC
    Type: AWS::EC2::VPCGatewayAttachment
    Properties:
      VpcId: !Ref F1DevVPC
      InternetGatewayId: !Ref InternetGateway
  PublicSubnet: # Create a public subnet reachable from the internet
    Type: AWS::EC2::Subnet
    DependsOn: AttachGateway
    Properties:
      CidrBlock: 10.0.1.0/24
      AvailabilityZone: us-west-2c
      MapPublicIpOnLaunch: 'true'
      VpcId: !Ref F1DevVPC
      Tags:
        - Key: Name
          Value: F1DevVPC-PublicSubnet
  PublicRouteTable: # Route table for internet traffic (its contents are defined next)
    Type: AWS::EC2::RouteTable
    DependsOn: AttachGateway
    Properties:
      VpcId: !Ref F1DevVPC
      Tags:
        - Key: Name
          Value: F1DevVPC-PublicRoute
  PublicRoute: # Routing definition so the PublicSubnet can reach the internet
    Type: AWS::EC2::Route
    DependsOn: AttachGateway
    Properties:
      RouteTableId: !Ref PublicRouteTable
      DestinationCidrBlock: 0.0.0.0/0
      GatewayId: !Ref InternetGateway
  PublicSubnetRouteTableAssociation: # Associate the routing definition with the route table
    Type: AWS::EC2::SubnetRouteTableAssociation
    Properties:
      SubnetId: !Ref PublicSubnet
      RouteTableId: !Ref PublicRouteTable
  PublicSecurityGroup: # Allow SSH access to the EC2 instances from the internet (the CidrIp should be narrowed further)
    Type: "AWS::EC2::SecurityGroup"
    Properties:
      GroupName: "F1DevSecurityGroup"
      GroupDescription: "ssh security group"
      VpcId: !Ref F1DevVPC
      SecurityGroupIngress:
        - IpProtocol: tcp
          FromPort: 22
          ToPort: 22
          CidrIp: "0.0.0.0/0"
      Tags:
        - Key: Name
          Value: "F1DevSecurityGroup"
  DevEC2Instance: # Development instance for DCP creation and related work
    Type: "AWS::EC2::Instance"
    Properties:
      ImageId: "ami-0209388abb64ef69c" # FPGA Developer AMI - 1.8.1
      InstanceType: "z1d.2xlarge"
      KeyName: !Ref KeyPairParameter
      NetworkInterfaces:
        - AssociatePublicIpAddress: "true"
          DeviceIndex: "0"
          SubnetId: !Ref PublicSubnet
          GroupSet:
            - !Ref PublicSecurityGroup
      BlockDeviceMappings:
        - DeviceName: /dev/sda1
          Ebs:
            VolumeSize: 100
            VolumeType: gp2
        - DeviceName: /dev/sdb
          NoDevice: {}
      UserData:
        Fn::Base64: !Sub |
          #!/bin/bash
          su centos -c "git clone ${GitRepoURL} /home/centos/src/project_data/aws-fpga"
      Tags:
        - Key: Name
          Value: "F1Dev Dev Instance"
  F1EC2Instance: # Instance for running AFIs
    Type: "AWS::EC2::Instance"
    Properties:
      ImageId: "ami-0209388abb64ef69c"
      InstanceType: "f1.2xlarge"
      KeyName: !Ref KeyPairParameter
      NetworkInterfaces:
        - AssociatePublicIpAddress: "true"
          DeviceIndex: "0"
          SubnetId: !Ref PublicSubnet
          GroupSet:
            - !Ref PublicSecurityGroup
      BlockDeviceMappings:
        - DeviceName: /dev/sda1
          Ebs:
            VolumeSize: 100
            VolumeType: gp2
        - DeviceName: /dev/sdb
          NoDevice: {}
      UserData:
        Fn::Base64: !Sub |
          #!/bin/bash
          su centos -c "git clone ${GitRepoURL} /home/centos/src/project_data/aws-fpga"
      Tags:
        - Key: Name
          Value: "F1Dev F1 Instance"
---
name: decimator
board: boards/red-pitaya
version: 0.1.1

cores:
  - fpga/cores/redp_adc_v1_0
  - fpga/cores/redp_dac_v1_0
  - fpga/cores/axi_ctl_register_v1_0
  - fpga/cores/axi_sts_register_v1_0
  - fpga/cores/dna_reader_v1_0

memory:
  - name: control
    offset: '0x60000000'
    range: 4K
  - name: status
    offset: '0x50000000'
    range: 4K
  - name: adc_fifo
    offset: '0x43C10000'
    range: 64K

control_registers:
  - led
  - dac[n_dac]

status_registers:
  - adc[n_adc]

parameters:
  fclk0: 166666667
  adc_clk: 125000000
  adc_width: 14
  dac_width: 14
  n_adc: 2
  n_dac: 2
  cic_differential_delay: 1
  cic_decimation_rate: 256
  cic_n_stages: 6

xdc:
  - boards/red-pitaya/config/ports.xdc
  - boards/red-pitaya/config/clocks.xdc

drivers:
  - server/drivers/common.hpp
  - ./decimator.hpp

web:
  - web/index.html
  - web/main.css
  - web/koheron.ts
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
  name: reqrsp_interface
  authors:
    - <NAME> <<EMAIL>>
    - <NAME> <<EMAIL>>

dependencies:
  common_cells: {path: ../../vendor/pulp_platform_common_cells}
  axi: {path: ../../vendor/pulp_platform_axi}

export_include_dirs:
  - include

sources:
  # Level 0:
  - src/reqrsp_pkg.sv
  # Level 1:
  - src/reqrsp_intf.sv
  # Level 2:
  - src/axi_to_reqrsp.sv
  - src/reqrsp_cut.sv
  - src/reqrsp_demux.sv
  - src/reqrsp_iso.sv
  - src/reqrsp_mux.sv
  - src/reqrsp_to_axi.sv
  - target: simulation
    files:
      - src/reqrsp_test.sv
  - target: test
    files:
      # Level 0
      - test/axi_to_reqrsp_tb.sv
      - test/reqrsp_demux_tb.sv
      - test/reqrsp_idempotent_tb.sv
      - test/reqrsp_mux_tb.sv
      - test/reqrsp_to_axi_tb.sv
runners:
  AWS-runner:
    image: ubuntu-vitis-2020-1

jobs:
  build_run_sim:
    runner: AWS-runner
    type:
      build: hardware
    current_working_directory: /tools/Xilinx/Vivado/2020.1/workspace/project_ci_sim
    output:
      artifact:
        - ./project_ci_sim.xpr
        - ./project_ci_sim.cache
        - ./project_ci_sim.hw
        - ./project_ci_sim.ip_user_files
        - ./project_ci_sim.sim
        - ./project_ci_sim.srcs
    steps:
      - run:
          name: Download files
          command: |
            source /tools/Xilinx/Vivado/2020.1/settings64.sh
            vivado -mode tcl -source sim.tcl
          type: miscellaneous

workflows:
  complete-build-test:
    jobs:
      - build_run_sim
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
apiVersion: kctf.dev/v1
kind: Challenge
metadata:
  name: letschat
spec:
  deployed: true
  allowConnectTo:
    - letschat-loadtest
    - letschat-bot
  powDifficultySeconds: 0
  network:
    public: true
    ports:
      - protocol: "HTTPS"
        targetPort: 1337
  healthcheck:
    enabled: true
---
apiVersion: networking.k8s.io/v1
kind: NetworkPolicy
metadata:
  name: letschat-sql-access
  namespace: default
spec:
  podSelector:
    matchLabels:
      app: letschat
  policyTypes:
    - Egress
  egress:
    - to:
        - ipBlock:
            cidr: 10.59.2.2/32
<reponame>BearerPipelineTest/google-ctf<gh_stars>1000+
# Human readable task name
name: abc arm and amd

# Long form description.
description: |+
  Provide a payload that prints the contents of the 'flag' file and runs on both x86-64 and arm64v8.
  The payload may only contain bytes in the range [0x20, 0x7F] and cannot be longer than 280 bytes.

# The flag
flag: CTF{abc_easy_as_svc}

# Task category. (one of hw, crypto, pwn, rev, web, net, misc)
category: misc

# === the fields below will be filled by SRE or automation ===

# Task label
label: ''

# URL for web challenges
link: ''

# host/port for non-web challenges
host: 'shellcode.2021.ctfcompetition.com 1337'

# the URL for attachments, to be filled in by automation
attachment: ''

# is this challenge released? Will be set by SREs
visible: false
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

description: FPN(Resnet18) for multi-class joint-training segmentation on COVID19.
input size: 352*352
float ops: 22.7G
task: segmentation
framework: pytorch
prune: 'no'
version: 1.4
files:
  - name: pt_FPN-resnet18_covid19-seg_352_352_22.7G_1.4
    type: float & quantized
    board: GPU
    download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_FPN-resnet18_covid19-seg_352_352_22.7G_1.4.zip
    checksum: cb3ed446dbbad88d1f37fc8183a8e1f8
  - name: FPN-resnet18_covid19-seg_pt
    type: xmodel
    board: zcu102 & zcu104 & kv260
    download link: https://www.xilinx.com/bin/public/openDownload?filename=FPN-resnet18_covid19-seg_pt-zcu102_zcu104_kv260-r1.4.0.tar.gz
    checksum: 3547d78e3caaeab9ed5d298c31766754
  - name: FPN-resnet18_covid19-seg_pt
    type: xmodel
    board: vck190
    download link: https://www.xilinx.com/bin/public/openDownload?filename=FPN-resnet18_covid19-seg_pt-vck190-r1.4.0.tar.gz
    checksum: a22e684c185a94eeb24def559060a717
  - name: FPN-resnet18_covid19-seg_pt
    type: xmodel
    board: vck5000
    download link: https://www.xilinx.com/bin/public/openDownload?filename=FPN-resnet18_covid19-seg_pt-vck5000-DPUCVDX8H-r1.4.0.tar.gz
    checksum: 85a73dd19f9ae62019f85ecc82ab3fe1
  - name: FPN-resnet18_covid19-seg_pt
    type: xmodel
    board: u50-DPUCAHX8H & u50lv-DPUCAHX8H & u280-DPUCAHX8H
    download link: https://www.xilinx.com/bin/public/openDownload?filename=FPN-resnet18_covid19-seg_pt-u50-u50lv-u280-DPUCAHX8H-r1.4.0.tar.gz
    checksum: cff623e4bece099b6b24020da9adc1c5
  - name: FPN-resnet18_covid19-seg_pt
    type: xmodel
    board: u50-DPUCAHX8L & u50lv-DPUCAHX8L & u280-DPUCAHX8L
    download link: https://www.xilinx.com/bin/public/openDownload?filename=FPN-resnet18_covid19-seg_pt-u50-u50lv-u280-DPUCAHX8L-r1.4.0.tar.gz
    checksum: 6245d163712bdbba671ed93f4486dc65
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
<reponame>ess-dmsc/dmg-build-scripts
---
- hosts: data-generators
  gather_facts: False
  tasks:
    - name: stop data generators
      command: "killall datagen_multigrid.bash mggen_readouts"
      ignore_errors: True
      tags:
        - multigrid
--- # Copyright 2021 Datum Technology Corporation # SPDX-License-Identifier: Apache-2.0 WITH SHL-2.1 ######################################################################################################################## # Licensed under the Solderpad Hardware License v 2.1 (the "License"); you may not use this file except in compliance # with the License, or, at your option, the Apache License version 2.0. You may obtain a copy of the License at # https://solderpad.org/licenses/SHL-2.1/ # Unless required by applicable law or agreed to in writing, any work distributed under the License is distributed on # an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. ######################################################################################################################## # Sample regression results file summary: project: my_project ip: @my_scope/[email protected] regression-name: my_regression label: null passed: False tests-passed: 201 tests-failed: 21 tests-unknown: 21 errors: 254 warnings: 475 infos: 40278 suite: name: My Test Suite path: "./tests/my_test_suite.yml" arguments: ip: ['dp=False', 'dp-width=32B'] sim: ['+NUM_PKTS=50'] custom-user-content: my-field-a: "asdsadsa" my-field-b: 232 duration: timestamp-start: 2021-06-03 timestamp-end: 2021-06-03 simulation-time: 1_560_054.293ns steps: library-creation-successful: True compilation-successful: True elaboration-successful: True tests-passed: - { name: "", seed: , label: "", fatals: 0, errors: 0, warnings: 45, infos: 4456, user-data: {}} - { name: "", seed: , label: "", fatals: 0, errors: 0, warnings: 45, infos: 4456, user-data: {}} - { name: "", seed: , label: "", fatals: 0, errors: 0, warnings: 45, infos: 4456, user-data: {}} - { name: "", seed: , label: "", fatals: 0, errors: 0, warnings: 45, infos: 4456, user-data: {}} - { name: "", seed: , label: "", fatals: 0, errors: 0, warnings: 45, infos: 4456, user-data: {}} - { name: "", seed: , label: "", fatals: 0, errors: 0, warnings: 45, infos: 4456, user-data: {}} - { name: "", seed: , label: "", fatals: 0, errors: 0, warnings: 45, infos: 4456, user-data: {}} - { name: "", seed: , label: "", fatals: 0, errors: 0, warnings: 45, infos: 4456, user-data: {}} tests-failed: - { name: "", seed: , label: "", fatals: 0, errors: 0, warnings: 45, infos: 4456, user-data: {}} - { name: "", seed: , label: "", fatals: 0, errors: 0, warnings: 45, infos: 4456, user-data: {}} tests-unknown: [] files: count: 3247 size: 4547MB compressed: False results-path: "./sim/results/my_ip/[my_regression]" mlist: "./regr.mlist" compilation-log: "./compilation.log" elaboration-log: "./elaboration.log" simulation-log: "./simulation.log" coverage-db: "./cov.ucdb" additional-logs: []
language: c
compiler: gcc

before_install:
  - sudo apt-get update -qq
  - sudo apt-get install -qq libpng-dev libjpeg-dev libatlas-base-dev libblas-dev libgsl0-dev

script:
  - cd Design/Test/
  # Simulation testing on remote CentOS cloud server
  # - cd Simulation/ && make && make clean && make test
  - cd UnitTest/ && make && make clean && make test
sudo: false
language: java
jdk:
  - openjdk7
  - oraclejdk8
git:
  submodules: false
script: ant build
<reponame>Koheron/koheron-sdk
---
name: peak_detector
board: boards/red-pitaya

control_registers:
  - peak_address_low
  - peak_address_high
  - peak_address_reset

status_registers:
  - peak_address
  - peak_maximum

cores:
  - fpga/cores/comparator_v1_0

parameters:
  wfm_width: 8
<filename>.github/workflows/manylinux.yml
name: Manylinux

on:
  push:
    tags:
      - 'v*.*.*'

jobs:
  deploy:
    strategy:
      matrix:
        os: [ubuntu-latest]
        python-version: [3.7]
        rust-version: [stable]
    runs-on: ${{ matrix.os }}
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}
      - name: Set up Rust ${{ matrix.rust-version }}
        uses: hecrj/setup-rust-action@v1
        with:
          rust-version: ${{ matrix.rust-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install twine
      - name: Build source distribution
        run: |
          python setup.py sdist
      - name: Build binary distributions
        env:
          DOCKER_IMAGE: quay.io/pypa/manylinux1_x86_64
          PLAT: manylinux1_x86_64
          PYVER: cp37-cp37m
        run: |
          docker pull $DOCKER_IMAGE
          docker run --rm -e PLAT=$PLAT -e PYVER=$PYVER -v `pwd`:/io $DOCKER_IMAGE /io/build-manylinux-wheels.sh
      - name: Publish wheels to PyPI
        env:
          TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
          TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
        run: |
          twine upload wheelhouse/svinst*-manylinux*.whl
          twine upload dist/*
# RUN: lld -flavor darwin -arch x86_64 -macosx_version_min 10.8 %s -o %t -dylib %p/Inputs/libSystem.yaml && llvm-objdump -private-headers %t | FileCheck %s
# RUN: lld -flavor darwin -arch x86_64 -macosx_version_min 10.8 %s -o %t -dylib %p/Inputs/libSystem.yaml -static -data_in_code_info && llvm-objdump -private-headers %t | FileCheck %s
# RUN: lld -flavor darwin -arch x86_64 -macosx_version_min 10.8 %s -o %t -dylib %p/Inputs/libSystem.yaml -no_data_in_code_info && llvm-objdump -private-headers %t | FileCheck %s --check-prefix=NO_DATA_IN_CODE_INFO
# RUN: lld -flavor darwin -arch x86_64 -macosx_version_min 10.8 %s -o %t -dylib %p/Inputs/libSystem.yaml -static -data_in_code_info -no_data_in_code_info && llvm-objdump -private-headers %t | FileCheck %s --check-prefix=NO_DATA_IN_CODE_INFO
# RUN: lld -flavor darwin -arch x86_64 -macosx_version_min 10.8 %s -o %t -dylib %p/Inputs/libSystem.yaml -static && llvm-objdump -private-headers %t | FileCheck %s --check-prefix=NO_DATA_IN_CODE_INFO
# RUN: lld -flavor darwin -arch x86_64 -macosx_version_min 10.8 %s -o %t -dylib %p/Inputs/libSystem.yaml -r && llvm-objdump -private-headers %t | FileCheck %s
# RUN: lld -flavor darwin -arch x86_64 -macosx_version_min 10.8 %s -o %t -dylib %p/Inputs/libSystem.yaml -r -data_in_code_info && llvm-objdump -private-headers %t | FileCheck %s
# RUN: lld -flavor darwin -arch x86_64 -macosx_version_min 10.8 %s -o %t -dylib %p/Inputs/libSystem.yaml -r -no_data_in_code_info && llvm-objdump -private-headers %t | FileCheck %s --check-prefix=NO_DATA_IN_CODE_INFO

--- !mach-o
arch: x86_64
file-type: MH_OBJECT
flags: [ MH_SUBSECTIONS_VIA_SYMBOLS ]
sections:
  - segment: __TEXT
    section: __text
    type: S_REGULAR
    attributes: [ S_ATTR_PURE_INSTRUCTIONS, S_ATTR_SOME_INSTRUCTIONS ]
    address: 0x0000000000000000
    content: [ 0x00, 0x00, 0x00, 0x00 ]
global-symbols:
  - name: _main
    type: N_SECT
    scope: [ N_EXT ]
    sect: 1
    value: 0x0000000000000000
...

# CHECK: Load command {{[0-9]*}}
# CHECK: cmd LC_DATA_IN_CODE
# CHECK: cmdsize 16
# CHECK: dataoff
# CHECK: datasize

# NO_DATA_IN_CODE_INFO-NOT: LC_DATA_IN_CODE
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
  name: future

dependencies:
  axi: {path: ../../vendor/pulp_platform_axi}
  common_cells: {path: ../../vendor/pulp_platform_common_cells}

sources:
  # Level 0:
  - src/axi_id_remap.sv
  - src/axi_id_serialize.sv
  - src/axi_to_mem.sv
  - src/axi_to_mem_banked.sv
  - src/mem_to_axi_lite.sv
  - src/axi_buf.sv
  # Level 1:
  - src/axi_iw_converter.sv
  - src/axi_xp.sv
<reponame>Calculasians/HDC-Sensor-Fusion-Research
sim.inputs.top_module: "hdc_sensor_fusion"
sim.inputs.tb_dut: "dut"
sim.inputs.tb_name: "hdc_sensor_fusion_tb"

sim.inputs.input_files_meta: "append"
sim.inputs.input_files:
  - "src/HDC_Sensor_Fusion_FoldedRule90/hdc_sensor_fusion.sv"
  - "src/HDC_Sensor_Fusion_FoldedRule90/hdc_sensor_fusion_tb.sv"
  - "src/HDC_Sensor_Fusion_FoldedRule90/associative_memory.sv"
  - "src/HDC_Sensor_Fusion_FoldedRule90/hv_binary_adder.sv"
  - "src/HDC_Sensor_Fusion_FoldedRule90/fuser.sv"
  - "src/HDC_Sensor_Fusion_FoldedRule90/spatial_encoder.sv"
  - "src/HDC_Sensor_Fusion_FoldedRule90/temporal_encoder.sv"
  - "src/HDC_Sensor_Fusion_FoldedRule90/hv_generator.sv"

sim.inputs:
  timescale: "1ns/1ps"
  options:
    - "-notice"
    - "-line"
    - "-debug_pp"
    - "-debug_all"
    - "+v2k"
    - "+lint=all,noVCDE"
    - "+incdir+../../src/HDC_Sensor_Fusion_FoldedRule90"
    - "+define+CLOCK_PERIOD=3"
    - "-sverilog"
  execute_sim: true
  execution_flags: ["+verbose=1"]
# This file defines single architecture set for tlut systolic array performance projection
- proj_16_16_bank16_block16
- proj_32_32_bank16_block16
- proj_64_64_bank16_block16
- proj_128_128_bank16_block16
<filename>rtl/generic_FLL/src_files.yml
fll:
  flags: [
    skip_synthesis,
  ]
  files: [
    fe/model/gf22_DCO_model.tc.vhd,
    fe/model/gf22_FLL_model.vhd,
    fe/rtl/FLLPkg.vhd,
    fe/rtl/FLL_clk_divider.vhd,
    fe/rtl/FLL_clk_period_quantizer.vhd,
    fe/rtl/FLL_clock_gated.rtl.vhd,
    fe/rtl/FLL_digital.vhd,
    fe/rtl/FLL_dither_pattern_gen.vhd,
    fe/rtl/FLL_glitchfree_clkdiv.vhd,
    fe/rtl/FLL_glitchfree_clkmux.vhd,
    fe/rtl/FLL_mux.rtl.vhd,
    fe/rtl/FLL_loop_filter.vhd,
    fe/rtl/FLL_reg.vhd,
    fe/rtl/FLL_settling_monitor.vhd,
    fe/rtl/FLL_synchroedge.vhd,
    fe/rtl/FLL_zerodelta.vhd,
  ]
<filename>rtl/tb/src_files.yml
tb:
  files: [
    jtag_pkg.sv,
    dbg_pkg.sv,
    tb_clk_gen.sv,
    tb_fs_handler.sv,
    dev_dpi/dev_dpi.sv,
    tb_pulp.sv,
  ]
udma_uart:
  files: [
    rtl/udma_uart_reg_if.sv,
    rtl/udma_uart_top.sv,
    rtl/udma_uart_rx.sv,
    rtl/udma_uart_tx.sv,
  ]
stages: - uhd - gnuradio - fpga .conditionals: &conditionals refs: - master - branches - web .uhd-build: &uhd-build - mkdir build - cd build - cmake ../ -DENABLE_UHD=1 -DENABLE_GNURADIO=0 - make -j10 .gnuradio-build: &gnuradio-build - mkdir build - cd build - cmake ../ -DENABLE_UHD=1 -DENABLE_GNURADIO=1 - make -j10 .fpga-script: &fpga-script - rm -rf ../uhd-fpga && git clone -b UHD-3.13 https://github.com/EttusResearch/fpga.git ../uhd-fpga - export UHD_FPGA_DIR=`pwd`/../uhd-fpga - source /opt/Xilinx/Vivado/2017.4/settings64.sh - cd fpga-rfnoc/testbenches - for d in ./*/ ; do (cd "$d" && pwd && make clean && ./runtestbench.sh); done build-uhd-master: stage: uhd image: theseuscores/uhd:master-rfnoc-all script: *uhd-build only: *conditionals build-uhd-3.13: stage: uhd image: theseuscores/uhd:UHD-3.13-rfnoc-all script: *uhd-build only: *conditionals build-uhd-3.14: stage: uhd image: theseuscores/uhd:UHD-3.14-rfnoc-all script: *uhd-build only: *conditionals build-gnuradio-maint-uhd-master: stage: gnuradio image: theseuscores/gnuradio:maint-3.7-UHD-master-rfnoc script: *gnuradio-build only: *conditionals build-gnuradio-maint-uhd-3.13: stage: gnuradio image: theseuscores/gnuradio:maint-3.7-UHD-3.13-rfnoc script: *gnuradio-build only: *conditionals build-gnuradio-maint-uhd-3.14: stage: gnuradio image: theseuscores/gnuradio:maint-3.7-UHD-3.14-rfnoc script: *gnuradio-build only: *conditionals fpga-test-merge: stage: fpga script: *fpga-script only: refs: - merge_requests changes: - fpga-rfnoc/**/* - fpga-src/**/* tags: - vivado fpga-test-master: stage: fpga script: *fpga-script only: refs: - master - web tags: - vivado
sim.inputs.top_module: "hdc_sensor_fusion"
sim.inputs.tb_dut: "dut"
sim.inputs.tb_name: "hdc_sensor_fusion_tb"

sim.inputs.input_files_meta: "append"
sim.inputs.input_files:
  - "src/HDC_Sensor_Fusion_SEFUAMFoldedRule90/hdc_sensor_fusion.sv"
  - "src/HDC_Sensor_Fusion_SEFUAMFoldedRule90/hdc_sensor_fusion_tb.sv"
  - "src/HDC_Sensor_Fusion_SEFUAMFoldedRule90/associative_memory.sv"
  - "src/HDC_Sensor_Fusion_SEFUAMFoldedRule90/hv_binary_adder.sv"
  - "src/HDC_Sensor_Fusion_SEFUAMFoldedRule90/fuser.sv"
  - "src/HDC_Sensor_Fusion_SEFUAMFoldedRule90/spatial_encoder.sv"
  - "src/HDC_Sensor_Fusion_SEFUAMFoldedRule90/temporal_encoder.sv"
  - "src/HDC_Sensor_Fusion_SEFUAMFoldedRule90/hv_generator_serial_circular.sv"

sim.inputs:
  timescale: "1ns/1ps"
  options:
    - "-notice"
    - "-line"
    - "-debug_pp"
    - "-debug_all"
    - "+v2k"
    - "+lint=all,noVCDE"
    - "+incdir+../../src/HDC_Sensor_Fusion_SEFUAMFoldedRule90"
    - "+define+CLOCK_PERIOD=1100"
    - "-sverilog"
  execute_sim: true
  execution_flags: ["+verbose=1"]
<reponame>Calculasians/HDC-Sensor-Fusion-Research<gh_stars>0
sim.inputs.top_module: "hdc_sensor_fusion"
sim.inputs.tb_dut: "dut"
sim.inputs.tb_name: "hdc_sensor_fusion_tb"

sim.inputs.input_files_meta: "append"
sim.inputs.input_files:
  - "src/SEFUAM_Channel_Count_Experiment/hdc_sensor_fusion.sv"
  - "src/SEFUAM_Channel_Count_Experiment/hdc_sensor_fusion_tb.sv"
  - "src/SEFUAM_Channel_Count_Experiment/associative_memory.sv"
  - "src/SEFUAM_Channel_Count_Experiment/hv_binary_adder.sv"
  - "src/SEFUAM_Channel_Count_Experiment/fuser.sv"
  - "src/SEFUAM_Channel_Count_Experiment/spatial_encoder.sv"
  - "src/SEFUAM_Channel_Count_Experiment/temporal_encoder.sv"
  - "src/SEFUAM_Channel_Count_Experiment/hv_generator_serial_circular.sv"

sim.inputs:
  timescale: "1ns/1ps"
  options:
    - "-notice"
    - "-line"
    - "-debug_pp"
    - "-debug_all"
    - "+v2k"
    - "+lint=all,noVCDE"
    - "+incdir+../../src/SEFUAM_Channel_Count_Experiment"
    - "+define+CLOCK_PERIOD=2512"
    - "-sverilog"
  execute_sim: true
  execution_flags: ["+verbose=1"]
<filename>examples/afe/clks.yaml
derived_clks:
  tb_emu_io:
    abspath: 'tb_i'
    emu_clk: 'emu_clk'
    emu_rst: 'emu_rst'
    emu_dt: 'emu_dt'
    dt_req: 'dt_req'
#  ext_dt:
#    abspath: 'tb_i'
#    dt_req: 'ext_dt'
jobs:
  - smoke:
      variables:
        - GTEST_FILTER: TblinkRpcHdlTestBase.smoke
  - single_time_cb:
      variables:
        - GTEST_FILTER: TblinkRpcHdlTestBase.single_time_cb
  - dual_layer_time_cb:
      variables:
        - GTEST_FILTER: TblinkRpcHdlTestBase.dual_layer_time_cb
<reponame>Hog-CERN/Hog
# Copyright 2018-2021 The University of Birmingham
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

variables:
  GIT_SSL_NO_VERIFY: "true"
  GIT_SUBMODULE_STRATEGY: recursive

before_script:
  - export REPO=`echo $CI_PROJECT_URL | sed -e s#https://##`
  - git remote set-url origin https://${HOG_USER}:${HOG_PUSH_TOKEN}@$REPO
  - git config user.email ${HOG_EMAIL}
  - git config user.name ${HOG_USER}
  - git fetch origin $CI_COMMIT_REF_NAME
  - git fetch origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
  - git checkout $CI_COMMIT_REF_NAME --
  - export XILINXD_LICENSE_FILE=${HOG_XIL_LICENSE}
  - export PATH=${HOG_PATH}:$PATH
  - export LD_LIBRARY_PATH=${HOG_LD_LIBRARY_PATH}:$LD_LIBRARY_PATH
  - source Hog/Other/CommonFunctions.sh
  - print_hog Hog/

stages:
  - merge
  - dynamic_generate
  - dynamic_triggers
  - user_pre
  - generation_and_simulation
  - user_proj
  - collect
  - user_post
  - archive
###############################################################################
## Configuration file for generation of Documentation of CTU CAN FD.
## This configuration file determines content of documentation that will be
## generated. Two types are possible:
##    IP (skip_conditional = false)
##    ASIC (skip_conditional = true)
##
## For IP, each register in the IP core is present (regardless of top level
## generic settings) with notes that mention conditional presence of register
## based on generic values (e.g. FILTER_A_MASK dependance on sup_filt_A).
##
## For ASIC, only registers which are present when the core is used with
## generics as set in this config are present.
###############################################################################

skip_conditional: false

parameters:
  sup_filt_A: true
  sup_filt_B: true
  sup_filt_C: true
  sup_range: true
  sup_traffic_ctrs: true
sim.inputs.top_module: "hdc_sensor_fusion"
sim.inputs.tb_dut: "dut"
sim.inputs.tb_name: "hdc_sensor_fusion_tb"

sim.inputs.input_files_meta: "append"
sim.inputs.input_files:
  - "src/HDC_Sensor_Fusion_3M1P/hdc_sensor_fusion.sv"
  - "src/HDC_Sensor_Fusion_3M1P/memory_wrapper.sv"
  - "src/HDC_Sensor_Fusion_3M1P/hdc_sensor_fusion_tb.sv"
  - "src/HDC_Sensor_Fusion_3M1P/associative_memory.sv"
  - "src/HDC_Sensor_Fusion_3M1P/hv2000_binary_adder.sv"
  - "src/HDC_Sensor_Fusion_3M1P/fuser.v"
  - "src/HDC_Sensor_Fusion_3M1P/memory_controller.sv"
  - "src/HDC_Sensor_Fusion_3M1P/spatial_encoder.v"
  - "src/HDC_Sensor_Fusion_3M1P/temporal_encoder.v"

sim.inputs:
  timescale: "1ps/1ps"
  options:
    - "-notice"
    - "-line"
    - "-debug_pp"
    - "-debug_all"
    - "+v2k"
    - "+lint=all,noVCDE"
    - "+incdir+../../src/HDC_Sensor_Fusion_3M1P"
    - "+define+CLOCK_PERIOD=0.01"
    - "-sverilog"
  execute_sim: true
  execution_flags: ["+verbose=1"]
--- algorithm: class: Alps population_size: 2000 elite_size: 20 max_layers: 5 aging_scheme: :fibonacci age_gap: 10 duplicate_elimination: true layer_diagnostic: true probabilities: crossover: 0.9 mutation: 0.01 init: method: grow # grow or full or random or ramped random_length: 8 sensible_depth: 6 termination: max_steps: 1000 on_individual: :stopping_condition grammar: class: Abnf::File filename: sample/ant_trail_tcc/grammar.abnf mapper: class: DepthLocus wraps_to_fail: 3 track_support_on: true selection: class: Tournament tournament_size: 3 selection_rank: class: Ranking elite_rank: class: Ranking order_by: :fitness #optional direction: :maximize #optional crossover: class: CrossoverLHS mutation: class: MutationNodal store: class: Store filename: ./ant_tcc_alps.store report: class: PopulationReport report_diversity: false report_statistics: true report_histogram: false individual: class: PipedIndividual shorten_chromozome: true _weak_pareto: :fitness: maximize :used_length: minimize _pipe_output: - :fitness: to_i _thresholds: :fitness: 89 _mark_phenotype: "\nMARKER\n" evaluator: class: WorkPipes commands: - 'ruby sample/ant_trail_tcc/ant_pipe.rb ONE sample/ant_trail_tcc/ant_evaluate.c' - 'ruby sample/ant_trail_tcc/ant_pipe.rb TWO sample/ant_trail_tcc/ant_evaluate.c'
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

description: fadnet for disparity estimation on scene flow.
input size: 576*960
float ops: 359G
task: disparity estimation
framework: pytorch
prune: 'no'
version: 1.4
files:
  - name: pt_fadnet_sceneflow_576_960_359G_1.4
    type: float & quantized
    board: GPU
    download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_fadnet_sceneflow_576_960_359G_1.4.zip
    checksum: 7404e51b9a3ed1b53994620ccf19c42c
  - name: FADNet_0_pt
    type: xmodel
    board: zcu102 & zcu104 & kv260
    download link: https://www.xilinx.com/bin/public/openDownload?filename=FADNet_0_pt-zcu102_zcu104_kv260-r1.4.1.tar.gz
    checksum: b4776c833279feab03e51c3b3a3fd982
  - name: FADNet_1_pt
    type: xmodel
    board: zcu102 & zcu104 & kv260
    download link: https://www.xilinx.com/bin/public/openDownload?filename=FADNet_1_pt-zcu102_zcu104_kv260-r1.4.1.tar.gz
    checksum: b5293c8b01790c2142ab298c18c7e6c9
  - name: FADNet_2_pt
    type: xmodel
    board: zcu102 & zcu104 & kv260
    download link: https://www.xilinx.com/bin/public/openDownload?filename=FADNet_2_pt-zcu102_zcu104_kv260-r1.4.1.tar.gz
    checksum: 8be8347924cdb6edcb1e517a953afc47
  - name: FADNet_0_pt
    type: xmodel
    board: vck190
    download link: https://www.xilinx.com/bin/public/openDownload?filename=FADNet_0_pt-vck190-r1.4.1.tar.gz
    checksum: 8ca3ad3f119c7649ab59d59011db6c9e
  - name: FADNet_1_pt
    type: xmodel
    board: vck190
    download link: https://www.xilinx.com/bin/public/openDownload?filename=FADNet_1_pt-vck190-r1.4.1.tar.gz
    checksum: 752324d4f76cbb2b44ecc0630a99df67
  - name: FADNet_2_pt
    type: xmodel
    board: vck190
    download link: https://www.xilinx.com/bin/public/openDownload?filename=FADNet_2_pt-vck190-r1.4.1.tar.gz
    checksum: 458445dc4ec88dc0bc49ef1d416f90e2
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
<reponame>koenenwmn/optimsoc
---
title: '\protect{Open SoC Debug -- \\ Core Debug Module based on the Advanced Debug System}'
author:
date: work-in-progress
---
<reponame>hprice99/ENGG4811_code
---
charIo:
  - name: char
    direction: output
    width: 8
  - name: char_output_ready
    direction: input
    width: 8
peToNetworkIo:
ledIo:
  - name: led
    direction: output
    width: 8
networkToPeIo:
nodeIo:
  - name: matrix_init_from_file
    direction: input
    width: 8
matrixIo:
  - name: matrix
    direction: output
    width: 32
  - name: matrix_end_row
    direction: output
    width: 8
  - name: matrix_end
    direction: output
    width: 8
  - name: matrix_init_type
    direction: input
    width: 8
  - name: matrix_init_x_coord
    direction: input
    width: 8
  - name: matrix_init_y_coord
    direction: input
    width: 8
  - name: matrix_init_element
    direction: input
    width: 32
  - name: matrix_init_read
    direction: output
    width: 8
networkIo:
#template_tql < $RDI_TEMPLATES/sdx/sdaccel/swhw/template.tql
description: testinfo generated using import_sdx_test.py script
level: 6
owner: soeren
user:
allowed_test_modes: [hw]
force_makefile: "--force"
host_args: {all: -k kernel.xclbin --jobs 1024 --seconds 1 --cus 8 --ert}
host_cflags: ' -DDSA64 -ldl -luuid'
host_exe: host.exe
host_src: main.cpp
kernels:
  - {cflags: {add: ' -I.'}, file: addone.xo, ksrc: kernel.cl, name: addone, type: C}
name: 100_ert_ncu
xclbins:
  - files: 'addone.xo '
    kernels:
      - cus: [addone_0, addone_1, addone_2, addone_3, addone_4, addone_5, addone_6, addone_7]
        name: addone
        num_cus: 8
    name: kernel.xclbin
<filename>sample/pid/aht.yaml --- algorithm: class: AgeHierarchyTree deme_size: 200 elite_size: 5 max_layers: 7 aging_scheme: :fibonacci age_gap: 13 layer_diagnostic: true probabilities: crossover: 0.9 mutation: 0.01 init: method: ramped # grow or full or random or ramped sensible_depth: 9 termination: on_individual: :stopping_condition #max_evaluations: 4000000 grammar: class: Abnf::File filename: sample/pid/grammar.abnf representation: class: CodonMod bit_size: 8 mapper: class: DepthLocus wraps_to_fail: 3 track_support_on: true # selection: is hardwired (Tournament with size 2) crossover: class: CrossoverLHS mutation: class: MutationNodal offset: 1 # due to DepthLocus magnitude: 128 store: class: Store filename: ./pid_aht.store report: class: PopulationReportStream report_diversity: false report_statistics: true report_histogram: true individual: class: PipedIndividual shorten_chromozome: true _weak_pareto: :error: minimize :complexity: minimize _pipe_output: - :error: to_f _thresholds: :error: 0.009 # :complexity: 200 _mark_batch: "MARKER\n" _mark_phenotype: "\n" evaluator: class: WorkPipes commands: - 'ruby sample/pid/pipe.rb ONE sample/pid/data.csv' - 'ruby sample/pid/pipe.rb TWO sample/pid/data.csv'
<gh_stars>0 # //////////////////////////////////////////////////////////////////////////////////////// # Copyright (c) by # Company: IDEA LAB, The University of Texas at Dallas # Author : <NAME> # # Originally Create Date: Mar-5, 2020 # Project Name : DNN_Framework # Tool Versions : Python +3 # # Description : Input arguments for setting design parameters # Dependencies : # Additional Comments: # # /////////////////////////////////////////////////////////////////////////////////////// design_setting: #topmodule: "dnn_LeNet" #HLS top module name #design_model : 'C1' #design_model : 'F3' #design_model : 'C1P1F1' #design_model : 'C2P2F1' #design_model : 'AlexNet' design_model : 'LeNet' #design_model : 'ConvNet' #design_model : 'fast_LeNet' #design_model : 'dnn_custom' solution_name: "sol1" create_ip: True run_vivado_synthesize: False run_vivado_power_analyzer: False rebuil_hls: True # if False, it will take files from /src vivado_version: 2020 # 2020 for 'vitis' and newer syn_label: any # is a label used for analyzes when running single synthesize quantitative_analysis: True analysis_log: True # choose one directive set type, details are in each design yaml file #syn_directive_type : 'best' #syn_directive_type : 'custom' syn_directive_type : 'minimal' #syn_directive_type : 'base' #syn_directive_type : 'none' debug: False syn_timeout: 5 # set timeout in Minutes training_setting: # for training a CNN network train_model: False retrain: False TrainingPercentage: 90 test_sample_num: 77 Modeling: # for modeling a CNN network DNN_ML_model: none # train, minimize, retrain, none run_estimation: False run_estimation_log: True method: keras_MLP #all, torch_MLP, keras_MLP, skl_MLP, skl_RFR, skl_GBR excel_report: False saving_trained_model: True show_log_interval: False plot_results: ['jpg']#['jpg', 'svg'] DSE_setting: solution_counts: 3 # for dse_pragma max_parallel_syn: 1 # max parallel running (for DSE pragma) dse_name: default # to place a label for the DSE directive_selection: random # order create_data_for_estimation: True # compact DSE data for building a CNN model copy_bc_files: True remove_hls_run_directories: False # for dse_clock and dse_pragma_clock (MHz) clock_range: min: 80 max: 300 samples: 1 # for dse_dtype dtype_range: in_t: [8,16,32] ker_t: [8,12,16] res_t: [16] all_variables: [8,12,16,24,32] # for dse_cfg and dse_pragma_cfg config_count: 2 # number of configs the DSE will explore cfg_ranges: # ranges to specify range of CNN layers to be explored wout_range: [34, 48, 5] # [min, max, num_samples] lyr_in_range: [2, 5, 2] lyr_out_range: [4, 8, 2] ker_range: [3,5] # [kernels] stride_range: [2,3] #[strides] networks: "c2p2f2": 50 "c3p2f2": 20 Sim_setting: run_csim: False run_rtl_sim: False print_log: False printTOfile: False layersTOprint: [conv, pool, fc] constraints: max_exec_delay: 20 ms DSP: 40% BRAM: 60% HW_setting: execute: none # none, map, sim, syn, impl, all bd_type: HS # HS, ZYNQ, none, AXI_BRAM Power_report: True extension_name: hs dev_etime: 10 FPGA: chip: ZCU102 # VC707, ZC706, ZCU102, KINTEX7 clock_period : 10 vivado_path: linux: 2019: HLS: '/opt/Xilinx/Vivado/2019.2/bin/vivado_hls' VIVADO: '/opt/Xilinx/Vivado/2019.2/bin/vivado' 2020: HLS: '/opt/Xilinx/Vitis_HLS/2020.2/bin/vitis_hls' VIVADO: '/opt/Xilinx/Vivado/2020.2/bin/vivado' win32: 2019: HLS: 'C:\Xilinx\Vivado\2019.2\bin\vivado_hls' VIVADO: 'C:\Xilinx\Vivado\2019.2\bin\vivado' 2020: HLS: 'C:\Xilinx\Vitis\2020.1\bin\vitis_hls' VIVADO: 'C:\Xilinx\Vivado\2020.1\bin\vivado'
<reponame>QuickLogic-Corp/tech_cells_generic<filename>src_files.yml
tech_cells_rtl:
  flags: [
    skip_synthesis,
  ]
  files: [
    src/deprecated/cluster_clk_cells.sv,
    src/deprecated/cluster_pwr_cells.sv,
    src/deprecated/generic_memory.sv,
    src/deprecated/generic_rom.sv,
    src/deprecated/pad_functional.sv,
    src/deprecated/pulp_buffer.sv,
    src/deprecated/pulp_clk_cells.sv,
    src/deprecated/pulp_pwr_cells.sv,
    src/rtl/tc_clk.sv,
    src/tc_pwr.sv,
    # copied this here to be easy
    src/deprecated/pulp_clock_gating_async.sv,
  ]

tech_cells_rtl_synth:
  files: [
    src/deprecated/pulp_clock_gating_async.sv,
  ]

tech_cells_fpga:
  targets: [
    xilinx,
  ]
  files: [
    src/deprecated/cluster_clk_cells_xilinx.sv,
    src/deprecated/cluster_pwr_cells.sv,
    src/deprecated/pulp_clk_cells_xilinx.sv,
    src/deprecated/pulp_pwr_cells.sv,
    src/deprecated/pulp_buffer.sv,
    src/fpga/tc_clk_xilinx.sv,
    src/tc_pwr.sv,
  ]
<reponame>till-s/slac-dev-board-examples
- mmio/AmcCarrierCore/AmcCarrierTiming:
  - TPGMiniCore/TxPolarity: !<value> 1
  - TPGMiniCore/TxLoopback: !<value> 0
- i2c-2/Si5344:
  - FORCE_HOLD: !<value> 0
- mmio/AmcCarrierCore/AmcCarrierTiming:
  - EvrV2CoreTriggers:
    - EvrV2ChannelReg/DestSelMode: !<value> "Dont_Care"
    - EvrV2ChannelReg/Enable: !<value> 1
    - EvrV2TriggerReg/Width: !<value> 10
dist: focal
language: c
compiler: gcc

matrix:
  include:
    - script:
        - mkdir .build && cd .build && cmake .. -DCMAKE_BUILD_TYPE=Debug -DOPAE_ENABLE_MOCK=ON -DOPAE_BUILD_TESTS=ON
        - make
        - ctest --timeout 180
      before_script:
        - sudo sysctl -w vm.nr_hugepages=8
      cache: ccache
      env:
        - CTEST_OUTPUT_ON_FAILURE=1
        - OPAE_EXPLICIT_INITIALIZE=1
        - LD_LIBRARY_PATH=$TRAVIS_BUILD_DIR/.build/lib
      addons:
        apt:
          packages:
            - libjson-c-dev
            - libhwloc-dev
            - libtbb-dev
            - uuid-dev
            - doxygen
            - linux-headers-generic
top_module: alu
repo_name: alu
core_dir: ../../examples/alu
working_dir: tb/verilator/
rtl_dirs:
  - rtl
  - rtl/unitblocks
parse_args: ''
verif_tools_path: ../../
verilator_args: ''
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: <NAME>
apiVersion: kctf.dev/v1
kind: Challenge
metadata:
  name: empty-ls
spec:
  deployed: true
  powDifficultySeconds: 0
  network:
    public: true
    ports:
      - protocol: "TCP"
        targetPort: 443
  healthcheck:
    # TIP: disable the healthcheck during development
    enabled: true
  allowConnectTo:
    - empty-ls-admin
---
apiVersion: networking.k8s.io/v1
kind: NetworkPolicy
metadata:
  name: empty-ls-metadata-access
  namespace: default
spec:
  podSelector:
    matchLabels:
      app: empty-ls
  policyTypes:
    - Egress
  egress:
    - to:
        - ipBlock:
            cidr: 127.0.0.0/8
    - ports:
        - port: 80
          protocol: TCP
---
apiVersion: v1
kind: ServiceAccount
metadata:
  name: empty-ls
  annotations:
    iam.gke.io/gcp-service-account: "<EMAIL>"
automountServiceAccountToken: false
<reponame>QueenField/MPSoC-WB-OR1K<filename>software/glip/src/common/logic/fifo/test/test_fifo_dualclock_standard.manifest.yaml module: test_fifo_dualclock_standard sources: - ../verilog/fifo_dualclock_standard.sv toplevel: fifo_dualclock_standard simulators: - vcs parameters: WIDTH: 16 DEPTH: 32
# Copyright 2020 ETH Zurich and University of Bologna. # Licensed under the Apache License, Version 2.0, see LICENSE for details. # SPDX-License-Identifier: Apache-2.0 # Build Docker image and publish to pulp-platform's GHCR. name: build-docker on: push: branches: - master jobs: build-docker: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - uses: docker/setup-buildx-action@v1 - name: GHCR Log-in uses: docker/login-action@v1 with: registry: ghcr.io username: pulp-bot password: ${{ secrets.CR_PAT }} - name: Build and push uses: docker/build-push-action@v2 with: context: . file: util/container/Dockerfile push: true tags: ghcr.io/pulp-platform/snitch:latest
ip: name: "uvmt_mio_cli_st" version: "1.0.0 Beta 0" full-name: "Moore.io CLI Testing Grounds UVM Test Bench" type: "DV" sub-type: "UVM Test Bench" sub-sub-type: "Agent Self-Testing" description: > UVM Test Bench used for development of Moore.io CLI. NOT meant for use in actual DV work. home-page: "https://datum-technology-corporation.github.io/mio_cli/" repo-uri: "https://github.com/Datum-Technology-Corporation/mio_cli.git" bugs: "https://github.com/Datum-Technology-Corporation/mio_cli/issues" aliases: [] logo: "" block-diagram: "docs/tb_block_diagram.svg" languages: ["sv"] simulators-supported: - { name: "vivado", version: "2021.1" } tags: [] copyright-holders: ["Datum Technology Corporation"] licenses: ["SHL-2.1"] structure: scripts-paths : [ "bin" ] docs-paths : [ "docs" ] examples-paths: [ "examples" ] src-paths : [ "src" ] dependencies: - { type: "DV", name: "uvm" , repo-uri: "https://github.com/Datum-Technology-Corporation/uvm.git" , repo-branch: "main", repo-path: "dv/uvm" } - { type: "DV", name: "uvml" , repo-uri: "https://github.com/Datum-Technology-Corporation/uvml.git" , repo-branch: "main", repo-path: "dv/uvml" } - { type: "DV", name: "uvml_logs", repo-uri: "https://github.com/Datum-Technology-Corporation/uvml_logs.git", repo-branch: "main", repo-path: "dv/uvml_logs" } - { type: "DV", name: "uvml_sb" , repo-uri: "https://github.com/Datum-Technology-Corporation/uvml_sb.git" , repo-branch: "main", repo-path: "dv/uvml_sb" } hdl-src: top-constructs: ["uvmt_mio_cli_st_tb", "uvml_logs_summary"] directories: [".", "tb", "tests"] test-paths: ["tests"] test-name-template: "uvmt_mio_cli_st_{{ name }}_test_c" flists: metrics : [ "src/uvmt_mio_cli_st_pkg.flist" ] riviera : [ "src/uvmt_mio_cli_st_pkg.flist" ] questa : [ "src/uvmt_mio_cli_st_pkg.flist" ] vcs : [ "src/uvmt_mio_cli_st_pkg.flist" ] vivado : [ "src/uvmt_mio_cli_st_pkg.flist.xsim" ] xcelium : [ "src/uvmt_mio_cli_st_pkg.flist" ]
language: c before_install: - sudo apt-get -qq update # luajit - sudo apt-get install luajit # z3 - if [[ $TARGET = "verilatorSOC" ]] || [[ $TARGET = "terra" ]]; then wget https://github.com/Z3Prover/z3/releases/download/z3-4.7.1/z3-4.7.1-x64-ubuntu-14.04.zip; fi - if [[ $TARGET = "verilatorSOC" ]] || [[ $TARGET = "terra" ]]; then unzip z3-4.7.1-x64-ubuntu-14.04.zip; fi - if [[ $TARGET = "verilatorSOC" ]] || [[ $TARGET = "terra" ]]; then export PATH=$PATH:$PWD/z3-4.7.1-x64-ubuntu-14.04/bin; fi - if [[ $TARGET = "verilatorSOC" ]] || [[ $TARGET = "terra" ]]; then z3 --version; fi # verilator - if [[ $TARGET = "verilator" ]] || [[ $TARGET = "verilatorSOC" ]] || [[ $TARGET = "unit" ]]; then sudo apt-get install verilator; fi - if [[ $TARGET = "verilator" ]] || [[ $TARGET = "verilatorSOC" ]] || [[ $TARGET = "unit" ]]; then export PKG_CONFIG_PATH=/home/travis/build/jameshegarty/rigel/platform/verilator; fi # bjump requires more recent verilator - if [[ $TARGET = "bjump" ]]; then wget https://www.veripool.org/ftp/verilator-4.012.tgz; tar xvzf verilator*.t*gz;cd verilator*;./configure;make -j2;sudo make install; cd ..; fi #terra - if [[ $TARGET = "terra" ]] || [[ $TARGET = "unit" ]]; then wget https://github.com/zdevito/terra/releases/download/release-2016-03-25/terra-Linux-x86_64-332a506.zip; fi - if [[ $TARGET = "terra" ]] || [[ $TARGET = "unit" ]]; then unzip terra-Linux-x86_64-332a506.zip; fi - if [[ $TARGET = "terra" ]] || [[ $TARGET = "unit" ]]; then sudo ln -s /home/travis/build/jameshegarty/rigel/terra-Linux-x86_64-332a506/bin/terra /usr/bin/terra; fi # this installs correct version of glibc for terra binary - if [[ $TARGET = "terra" ]] || [[ $TARGET = "unit" ]]; then sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test; fi - if [[ $TARGET = "terra" ]] || [[ $TARGET = "unit" ]]; then sudo apt-get -y update; fi - if [[ $TARGET = "terra" ]] || [[ $TARGET = "unit" ]]; then sudo apt-get -y install libstdc++6-4.7-dev; fi #for coveralls - if [[ $TARGET = "unit" ]] || [[ $TARGET = "coverage" ]]; then sudo apt-get install luarocks; fi - if [[ $TARGET = "unit" ]] || [[ $TARGET = "coverage" ]]; then sudo luarocks install luacov-coveralls; fi - if [[ $TARGET = "unit" ]] || [[ $TARGET = "coverage" ]]; then eval `luarocks path --bin`; fi script: # early out on errors - set -e - if [[ $TARGET = "unit" ]]; then export LUA="../rigelLuajit -lluacov"; fi - if [[ $TARGET = "unit" ]]; then cd unittests; make; else cd examples; make -j2 $TARGET; fi - if [[ $TARGET = "verilog" ]]; then make -j2 wrapper; fi # check that make actually 100% completed, just to be really sure (?) - pwd - echo out/${TARGET}_done.txt - if [[ $TARGET != "unit" ]]; then test -e out/${TARGET}_done.txt || exit; fi - if [[ $TARGET = "unit" ]]; then luacov-coveralls -v; fi env: - TARGET=verilog - TARGET=verilator - TARGET=verilatorSOC - TARGET=terra - TARGET=bjump - TARGET=unit - TARGET=coverage # - TARGET=axiverilog
# Copyright 2020 ETH Zurich and University of Bologna. # Licensed under the Apache License, Version 2.0, see LICENSE for details. # SPDX-License-Identifier: Apache-2.0 name: publish-docs on: push: branches: - master jobs: deploy: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-python@v2 with: python-version: 3.x - uses: mattnotmitt/doxygen-action@v1 with: working-directory: sw/ - name: Generate Runtime Documentation run: | mkdir doxybook2; cd doxybook2 wget https://github.com/matusnovak/doxybook2/releases/download/v1.4.0/doxybook2-linux-amd64-v1.4.0.zip unzip doxybook2-linux-amd64-v1.4.0.zip; cd ../ chmod +x doxybook2/bin/doxybook2 mkdir docs/runtime ./doxybook2/bin/doxybook2 --input sw/doxygen/xml --output docs/runtime --config docs/doxybook2.json rm -rf doxybook2 - run: pip install -r docs/requirements.txt - run: mkdocs gh-deploy --force
quick_check:kc705_basex__2020.2: extends: .template_vivado_quick_check variables: VIVADO_VERSION: "2020.2" PROJ: kc705_basex quick_check:kc705_basex__2019.2: extends: .template_vivado_quick_check variables: VIVADO_VERSION: "2019.2" PROJ: kc705_basex quick_check:kc705_gmii__2020.2: extends: .template_vivado_quick_check variables: VIVADO_VERSION: "2020.2" PROJ: kc705_gmii quick_check:kc705_gmii__2019.2: extends: .template_vivado_quick_check variables: VIVADO_VERSION: "2019.2" PROJ: kc705_gmii quick_check:kcu105_basex__2020.2: extends: .template_vivado_quick_check variables: VIVADO_VERSION: "2020.2" PROJ: kcu105_basex quick_check:kcu105_basex__2019.2: extends: .template_vivado_quick_check variables: VIVADO_VERSION: "2019.2" PROJ: kcu105_basex quick_check:zcu102_basex__2020.2: extends: .template_vivado_quick_check variables: VIVADO_VERSION: "2020.2" PROJ: zcu102_basex quick_check:zcu102_basex__2019.2: extends: .template_vivado_quick_check variables: VIVADO_VERSION: "2019.2" PROJ: zcu102_basex quick_check:zcu102_c2c_loopback__2019.2: extends: .template_vivado_quick_check variables: VIVADO_VERSION: "2019.2" PROJ: zcu102_c2c_loopback quick_check:k800__2020.2: extends: .template_vivado_quick_check variables: VIVADO_VERSION: "2020.2" PROJ: k800 quick_check:k800__2019.2: extends: .template_vivado_quick_check variables: VIVADO_VERSION: "2019.2" PROJ: k800 quick_check:vcu118_pcie__2020.2: extends: .template_vivado_quick_check variables: VIVADO_VERSION: "2020.2" PROJ: vcu118_pcie quick_check:vcu118_pcie__2019.2: extends: .template_vivado_quick_check variables: VIVADO_VERSION: "2019.2" PROJ: vcu118_pcie quick_check:vcu118_sgmii__2020.2: extends: .template_vivado_quick_check variables: VIVADO_VERSION: "2020.2" PROJ: vcu118_sgmii quick_check:vcu118_sgmii__2019.2: extends: .template_vivado_quick_check variables: VIVADO_VERSION: "2019.2" PROJ: vcu118_sgmii check-depfiles: extends: .template_base tags: - docker - xilinx-tools stage: quick_checks variables: VIVADO_VERSION: "2019.2" script: - ipbb init work_area - cd work_area - ln -s ${CI_PROJECT_DIR} src/ipbus-firmware - ./src/ipbus-firmware/tests/ci/check-dep-files.sh artifacts: when: on_success paths: - work_area expire_in: 1 day
# Copyright 2019 Xilinx Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. description: resnetv1_50 classifier on ImageNet. input size: 224*224 float ops: 6.97G task: classification framework: tensorflow prune: 'no' version: 2.0 files: - name: tf_resnetv1_50_imagenet_224_224_6.97G_2.0 type: float & quantized board: GPU download link: https://www.xilinx.com/bin/public/openDownload?filename=tf_resnetv1_50_imagenet_224_224_6.97G_2.0.zip checksum: f54262612f40f612d188ae05a05a591a - name: resnet_v1_50_tf type: xmodel board: zcu102 & zcu104 & kv260 download link: https://www.xilinx.com/bin/public/openDownload?filename=resnet_v1_50_tf-zcu102_zcu104_kv260-r2.0.0.tar.gz checksum: 6c6322c945b9ae6826991be70f0389ee - name: resnet_v1_50_tf type: xmodel board: vck190 download link: https://www.xilinx.com/bin/public/openDownload?filename=resnet_v1_50_tf-vck190-r2.0.0.tar.gz checksum: 1efd235fcc01ca767a92e3dfcd63e8e8 - name: resnet_v1_50_tf type: xmodel board: vck50006pe-DPUCVDX8H-DWC download link: https://www.xilinx.com/bin/public/openDownload?filename=resnet_v1_50_tf-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz checksum: d36c9f095454b46322a606da304448b6 - name: resnet_v1_50_tf type: xmodel board: vck50008pe-DPUCVDX8H download link: https://www.xilinx.com/bin/public/openDownload?filename=resnet_v1_50_tf-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz checksum: 198bf9fd049b03aac70f6cf95e6ce702 - name: resnet_v1_50_tf type: xmodel board: u50lv-DPUCAHX8H download link: https://www.xilinx.com/bin/public/openDownload?filename=resnet_v1_50_tf-u50lv-DPUCAHX8H-r2.0.0.tar.gz checksum: 91401a14c2c3258e25ec315dc25a5243 - name: resnet_v1_50_tf type: xmodel board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC download link: https://www.xilinx.com/bin/public/openDownload?filename=resnet_v1_50_tf-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz checksum: 9515147b13482f4def57f52cdd23f9ca - name: resnet_v1_50_tf type: xmodel board: u200-DPUCADF8H & u250-DPUCADF8H download link: https://www.xilinx.com/bin/public/openDownload?filename=resnet_v1_50_tf-u200-u250-r2.0.0.tar.gz checksum: 81147d7633e3ef29f22614ab13c496d9 license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
--- title: '\protect{Open SoC Debug -- \\ Memory Mapped I/O Bridge}' author: date: work-in-progress ---
# Test build the MoveIt! tutorials. Author: <NAME>
sudo: required
dist: trusty
language: ruby
rvm:
  - 2.4
python:
  - "2.7"
compiler:
  - gcc
before_install: # Use this to prepare the system to install prerequisites or dependencies
  # Define some config vars
  - export NOKOGIRI_USE_SYSTEM_LIBRARIES=true
  - export CI_SOURCE_PATH=$(pwd)
  - export REPOSITORY_NAME=${PWD##*/}
  - echo "Testing branch $TRAVIS_BRANCH of $REPOSITORY_NAME"
  - sudo -E sh -c 'echo "deb http://packages.ros.org/ros/ubuntu `lsb_release -cs` main" > /etc/apt/sources.list.d/ros-latest.list'
  - wget http://packages.ros.org/ros.key -O - | sudo apt-key add -
  - sudo apt-get update -qq
  # Start: there is an issue with postgres on Travis. By removing postgres, the issue is resolved.
  # TODO: remove this work-around
  - sudo apt-get purge postgresql* -y -qq
  - sudo apt-get autoremove -y -qq
  - sudo apt-get install synaptic -y -qq
  - sudo apt-get update -qq
  - sudo apt-get upgrade -y -qq --allow-unauthenticated
  - sudo apt-get install postgresql -y -qq
  # End: changes made to fix postgresql issue
  - sudo apt-get install -qq -y python-rosdep python-wstool python-catkin-tools
  # Setup rosdep
  - sudo rosdep init
  - rosdep update
  # Install htmlproofer
  - gem update --system
  - gem --version
  - gem install html-proofer
  # Install ROS's version of Sphinx
  - sudo apt-get -qq install ros-indigo-rosdoc-lite
  - source /opt/ros/indigo/setup.bash
script:
  # Test build with non-ROS wrapped Sphinx command to allow warnings and errors to be caught
  - sphinx-build -W -b html . native_build
  # Test build with the ROS version of the Sphinx command so that it is generated the same as on ros.org
  - rosdoc_lite -o build .
  # Run HTML tests on generated build output to check for 404 errors, etc.
  - htmlproofer ./build --only-4xx --check-html --file-ignore ./build/html/genindex.html,./build/html/search.html,./build/html/index-msg.html --alt-ignore '/.*/' --url-ignore '#'
# after_success and deploy are skipped if build is broken
after_success:
  # Tell GitHub Pages not to bypass Jekyll processing
  - touch build/html/.nojekyll
# Adapted from Garnet and ButterPHY name: input_buffer commands: - | mkdir -p outputs tar -xzvf /home/sjkim85/dragonphy_tarballs/input_buffer-latest.tar.gz -C outputs outputs: - input_buffer.lef - input_buffer.gds - input_buffer.spi - input_buffer.version
<reponame>rsarwar87/koheron-sdk version: 2 jobs: build: docker: - image: circleci/python:2.7-stretch-browsers - image: circleci/node:10.11.0-stretch-browsers steps: - checkout - run: name: Append dev to source.list command: sudo bash -c "echo deb http://archive.ubuntu.com/ubuntu/ xenial main >> /etc/apt/sources.list"; sudo bash -c "echo deb-src http://archive.ubuntu.com/ubuntu/ xenial main >> /etc/apt/sources.list" - run: name: Install GCC command: sudo apt-get update; sudo apt-get install -y --allow-unauthenticated gcc-5 g++-5 gcc-5-arm-linux-gnueabihf g++-5-arm-linux-gnueabihf; sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-5 100; sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-5 100 - run: name: Link GCC command: sudo ln -s /usr/bin/arm-linux-gnueabihf-gcc-5 /usr/bin/arm-linux-gnueabihf-gcc; sudo ln -s /usr/bin/arm-linux-gnueabihf-gcc-5 /usr/bin/arm-linux-gnueabihf-g++ - run: name: Install python requirements command: sudo pip install -r requirements.txt - run: name: Install Koheron python command: sudo pip install python/. - run: name: Setup web command: curl -sL https://deb.nodesource.com/setup_10.x | sudo -E bash -; sudo make setup_web - run: name: Build server command: sudo bash build_examples.sh server - run: name: Build web command: sudo bash build_examples.sh web deploy: docker: - image: circleci/python:2.7-stretch-browsers steps: - checkout - run: name: Install python requirements command: sudo pip install -r requirements.txt - run: name: install twine command: sudo pip install twine - run: name: Upload Koheron python to pypi command: sudo make PYPI_USERNAME=$PYPI_USERNAME PYPI_PASSWORD=$PYPI_PASSWORD upload_pip workflows: version: 2 build-deploy: jobs: - build - deploy: filters: branches: only: master
<reponame>ruck314/dev-board-misc-utils<gh_stars>0 ############################################################################## ## This file is part of 'dev-board-misc-utils'. ## It is subject to the license terms in the LICENSE.txt file found in the ## top-level directory of this distribution and at: ## https://confluence.slac.stanford.edu/display/ppareg/LICENSE.html. ## No part of 'dev-board-misc-utils', including this file, ## may be copied, modified, propagated, or distributed except according to ## the terms contained in the LICENSE.txt file. ############################################################################## #schemaversion 3.0.0 #once AxiFanController.yaml # AxiFanController: &AxiFanController size: 0x00000010 class: MMIODev children: TemperatureADC: class: IntField mode: RO sizeBits: 16 signed: false description: "Sysmon Temperature ADC Reading" at: { offset: 0x0 } SysMonAlarm: class: IntField mode: RO sizeBits: 1 lsBit: 0 description: "Over-Temperature Alarm" at: { offset: 0x2 } MultOverrange: class: IntField mode: RO sizeBits: 1 lsBit: 1 description: "Multiplier Overrange (forces full speed)" at: { offset: 0x2 } Kp: class: IntField mode: RW signed: false sizeBits: 7 lsBit: 0 description: "Feedback Kp Coefficient (0..127)" at: { offset: 0x4 } Preshift: class: IntField mode: RW signed: false sizeBits: 4 lsBit: 4 description: "Feedback Pre-shift: del = (T-TargetT) << preshift" at: { offset: 0x5 } RefTempAdc: class: IntField mode: RW signed: false sizeBits: 16 lsBit: 0 description: "Feedback Reference Temp. (Equivalent ADC)" at: { offset: 0x6 } Bypass: class: IntField mode: RW sizeBits: 1 lsBit: 7 description: "Feedback Bypass" at: { offset: 0x4 } Speed: class: IntField mode: RW sizeBits: 4 lsBit: 0 description: "Fan Speed (0..15) when Feedback Bypassed" at: { offset: 0x5 }
<reponame>FelixLuciano/Elements-of-Computing-Systems
# This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: Test

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  build:
    runs-on: ubuntu-20.04
    env:
      VUNIT_SIMULATOR: modelsim
      VUNIT_MODELSIM_PATH: /home/runner/modelsim_ase/bin/
      LD_LIBRARY_PATH: /home/runner/modelsim_ase/lib32
    steps:
    - uses: actions/checkout@v2
    - name: Set up Python 3.8
      uses: actions/setup-python@v2
      with:
        python-version: 3.8
    - name: Install ubuntu dependencies
      run: |
        sudo dpkg --add-architecture i386
        sudo apt-get update
        sudo apt-get install build-essential
        sudo apt-get install gcc make libxft2:i386 libxext6:i386 \
          libncurses5:i386 libstdc++6:i386 libpng-dev libpng16-16:i386 \
          libpng16-16 python-gobject libnotify-bin
    - name: Install modelsim
      run: |
        wget https://download.altera.com/akdlm/software/acdsinst/20.1std.1/720/ib_installers/ModelSimSetup-20.1.1.720-linux.run -O modelsim.run
        chmod +x modelsim.run
        ./modelsim.run --mode unattended --accept_eula 1 --installdir $HOME
        sed -i '209 a\ 4.[0-9]*) vco="linux" ;;' $HOME/modelsim_ase/vco
    - name: Install python dependencies
      run: |
        pip install wheel
        pip install termcolor vunit_hdl pymongo
    - name: Test vhdl
      run: |
        bash updateZ01tools.sh
        python3 Projetos/F-Computador/testeAssemblyMyCPU.py
        python3 Projetos/F-Computador/testeHW.py lib.tb_memoryio.all
        python3 Projetos/F-Computador/testeHW.py lib.tb_controlunit.all
        # python3 Projetos/B-LogicaCombinacional/testeLogicaCombinacional.py
        # python3 Projetos/C-UnidadeLogicaAritmetica/testeULA.py
        # python3 Projetos/D-LogicaSequencial/testeLogicaSequencial.py
        # python3 Projetos/E-Assembly/testeAssembly.py
<gh_stars>10-100 soc_interconnect: files: [ RTL/l2_tcdm_demux.sv, RTL/lint_2_apb.sv, RTL/lint_2_axi.sv, RTL/axi_2_lint/axi64_2_lint32.sv, RTL/axi_2_lint/axi_read_ctrl.sv, RTL/axi_2_lint/axi_write_ctrl.sv, RTL/axi_2_lint/lint64_to_32.sv, RTL/XBAR_L2/AddressDecoder_Req_L2.sv, RTL/XBAR_L2/AddressDecoder_Resp_L2.sv, RTL/XBAR_L2/ArbitrationTree_L2.sv, RTL/XBAR_L2/FanInPrimitive_Req_L2.sv, RTL/XBAR_L2/FanInPrimitive_Resp_L2.sv, RTL/XBAR_L2/MUX2_REQ_L2.sv, RTL/XBAR_L2/RequestBlock_L2_1CH.sv, RTL/XBAR_L2/RequestBlock_L2_2CH.sv, RTL/XBAR_L2/ResponseBlock_L2.sv, RTL/XBAR_L2/ResponseTree_L2.sv, RTL/XBAR_L2/RR_Flag_Req_L2.sv, RTL/XBAR_L2/XBAR_L2.sv, RTL/XBAR_BRIDGE/AddressDecoder_Req_BRIDGE.sv, RTL/XBAR_BRIDGE/AddressDecoder_Resp_BRIDGE.sv, RTL/XBAR_BRIDGE/ArbitrationTree_BRIDGE.sv, RTL/XBAR_BRIDGE/FanInPrimitive_Req_BRIDGE.sv, RTL/XBAR_BRIDGE/FanInPrimitive_Resp_BRIDGE.sv, RTL/XBAR_BRIDGE/MUX2_REQ_BRIDGE.sv, RTL/XBAR_BRIDGE/RequestBlock1CH_BRIDGE.sv, RTL/XBAR_BRIDGE/RequestBlock2CH_BRIDGE.sv, RTL/XBAR_BRIDGE/ResponseBlock_BRIDGE.sv, RTL/XBAR_BRIDGE/ResponseTree_BRIDGE.sv, RTL/XBAR_BRIDGE/RR_Flag_Req_BRIDGE.sv, RTL/XBAR_BRIDGE/XBAR_BRIDGE.sv, ]
cache_root_dir: /usr/scratch2/dolent1/gitlabci/buildcache/ara artifacts: tc-gcc: inputs: - Makefile - toolchain/riscv-gnu-toolchain outputs: - install/riscv-gcc tc-verilator: inputs: - Makefile - toolchain/verilator outputs: - install/verilator tc-isa-sim: inputs: - Makefile - toolchain/riscv-isa-sim outputs: - install/riscv-isa-sim
package: name: pytblink-rpc version: None deps: - name: tblink-rpc-core url: https://github.com/tblink-rpc/tblink-rpc-core.git dev-deps: - name: tblink-rpc-core url: https://github.com/tblink-rpc/tblink-rpc-core.git
<filename>models/AI-Model-Zoo/model-list/pt_personreid-res18_market1501_176_80_1.1G_2.0/model.yaml<gh_stars>1-10 # Copyright 2019 Xilinx Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. description: 'Person re-identification model (backbone: resnet18).' input size: 176*80 float ops: 1.1G task: person reid framework: pytorch prune: 'no' version: 2.0 files: - name: pt_personreid-res18_market1501_176_80_1.1G_2.0 type: float & quantized board: GPU download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_personreid-res18_market1501_176_80_1.1G_2.0.zip checksum: 102157d202f82bab61a3a9ac3f141142 - name: personreid-res18_pt type: xmodel board: zcu102 & zcu104 & kv260 download link: https://www.xilinx.com/bin/public/openDownload?filename=personreid-res18_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz checksum: 2316c8cb591ae10268c60d862bbd51d2 - name: personreid-res18_pt type: xmodel board: vck190 download link: https://www.xilinx.com/bin/public/openDownload?filename=personreid-res18_pt-vck190-r2.0.0.tar.gz checksum: 50339b9c9fc9cf3de947d7705b456702 - name: personreid-res18_pt type: xmodel board: vck50006pe-DPUCVDX8H-DWC download link: https://www.xilinx.com/bin/public/openDownload?filename=personreid-res18_pt-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz checksum: 44d0d138a6c05b12b7ae614415de4494 - name: personreid-res18_pt type: xmodel board: u50lv-DPUCAHX8H download link: https://www.xilinx.com/bin/public/openDownload?filename=personreid-res18_pt-u50lv-DPUCAHX8H-r2.0.0.tar.gz checksum: 97547beaa1d3751cb6a785baf717c5c7 - name: personreid-res18_pt type: xmodel board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC download link: https://www.xilinx.com/bin/public/openDownload?filename=personreid-res18_pt-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz checksum: 90405afe9bc19528a7a7c5459a9a5399 license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
<filename>2021/quals/pwn-atheris/challenge.yaml apiVersion: kctf.dev/v1 kind: Challenge metadata: name: atheris spec: deployed: true powDifficultySeconds: 0 network: public: true healthcheck: # TIP: disable the healthcheck during development enabled: true
<reponame>abs-tudelft/vhlib<gh_stars>1-10 resources: repositories: - repository: abs-tudelft type: github endpoint: github name: abs-tudelft/azure-pipelines-templates jobs: - template: jobs/vhdeps.yml@abs-tudelft parameters: sources: ['sim', 'stream', 'util']
- Global: Print : true - Library: Name : merge_sorter Format : "#{File.basename(file_name,\".vhd\")}.o : #{file_name} #{@use_list.map{|u|File.basename(u.file_name,\".vhd\")+\".o\"}.join(\" \")}\n\tghdl -a -C $(GHDLFLAGS) --work=#{library_name} #{file_name}\n\n" PathList : - "../../../src/main/vhdl/" - "../../../src/test/vhdl/" Top : ["OddEven_Sorter_Test_Bench"]
package: name: udma_camera authors: - "<NAME> <<EMAIL>>" - "<NAME> <<EMAIL>>" - "<NAME> <<EMAIL>>" - "<NAME> <<EMAIL>>" - "<NAME> <<EMAIL>>" dependencies: tech_cells_generic: { git: "https://github.com/pulp-platform/tech_cells_generic.git", version: 0.2.3 } udma_core: { git: "https://github.com/pulp-platform/udma_core.git", version: 1.0.2 } sources: - rtl/camera_reg_if.sv - rtl/camera_if.sv
#template_tql < $RDI_TEMPLATES/sdx/sdaccel/swhw/template.tql description: testinfo generated using import_sdx_test.py script level: 6 owner: sonals user: allowed_test_modes: [sw_emu, hw_emu, hw] force_makefile: "--force" host_args: {all: -d acc} host_cflags: ' -DDSA64' host_exe: host.exe host_src: main.cpp oclErrorCodes.cpp oclHelper.cpp name: 003_bringup0 only_host_code: 1
<filename>server/src/test/deployments/federated_basic_sql/provider1/configuration.yml hosts: provider1_machine: processes: main_instance: components: core: config_file: core_config.py type: core experiment_dummy1: class: experiments.dummy.DummyExperiment config: dummy_verbose: false type: experiment experiment_dummy3_with_other_name: class: experiments.dummy.DummyExperiment config: dummy_verbose: false type: experiment laboratory: config_file: lab_config.py type: laboratory config_file: process_config.py
--- name: make release on: push: tags: ["d*.*.*"] jobs: build: runs-on: ubuntu-latest timeout-minutes: 60 env: BOOTSTRAP: https://github.com/modula3/cm3/releases/download/d5.11.4/cm3-boot-AMD64_LINUX-d5.11.4.tar.xz steps: - name: Install prerequisites run: | sudo apt-get update --quiet sudo apt-get install --quiet --assume-yes ninja-build - name: Define install location run: | echo CM3_INSTALL="$(pwd)/../install" >> $GITHUB_ENV - name: Install bootstrap run: | mkdir ../bootstrap ../build curl --location --silent "${BOOTSTRAP}" | tar Jxf - --directory=../bootstrap --strip-components=1 cmake -S ../bootstrap -B ../build -G Ninja -DCMAKE_INSTALL_PREFIX="${CM3_INSTALL}" cmake --build ../build cmake --install ../build echo "${CM3_INSTALL}/bin" >> $GITHUB_PATH - name: Fetch sources uses: actions/checkout@v2 - name: Fetch tag run: git fetch --force origin ${{ github.ref }}:${{ github.ref }} - name: Upgrade compiler run: | scripts/concierge.py upgrade - name: Build distributions run: | scripts/concierge.py make-dist --target AMD64_LINUX scripts/concierge.py make-dist --target AMD64_NT scripts/concierge.py make-dist --target I386_LINUX scripts/concierge.py make-dist --target I386_NT - name: Create release uses: softprops/action-gh-release@v1 with: draft: true files: | *.7z *.tar.xz
<filename>.travis.yml dist: trusty sudo: required language: bash before_install: - sudo apt-add-repository "deb http://archive.ubuntu.com/ubuntu trusty-backports main restricted universe multiverse" - sudo apt-get -qq update - sudo apt-get install gperf -y - wget https://sourceforge.net/projects/iverilog/files/latest/download -O verilog.tar.gz - tar xvzf verilog.tar.gz - cd verilog* - ./configure && make - sudo make install - cd - script: - time make sim notify-send="@#echo"
name: CI on: [push] jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 - name: Install verilator run: sudo apt-get install verilator - name: Install fusesoc run: | sudo pip install fusesoc fusesoc init -y - name: Setup project run: | cd Hardware fusesoc library add Illusion . - name: Simulate Command Fetcher run: | cd Hardware fusesoc run --target sim_command_fetcher Illusion:Illusion:Illusion:1.2 - name: Simulate Command Processor run: | cd Hardware fusesoc run --target sim_command_processor Illusion:Illusion:Illusion:1.2 - name: Floating point core run: | cd Hardware fusesoc run --target=fp_core Illusion:Illusion:FPCore:0.1 - name: Simulate Top run: | cd Hardware fusesoc run --target sim Illusion:System:Top:1.0
<filename>models/AI-Model-Zoo/model-list/cf_face-quality_80_60_61.68M_2.0/model.yaml # Copyright 2019 Xilinx Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. description: face quality model. This model is converted from the pytorch model-face_quality. It outputs a face quality score, which is usually used to filter low-quality faces to further improve face recognition accuracy. input size: 80*60 float ops: 61.68M task: face quality framework: caffe prune: 'no' version: 2.0 files: - name: cf_face-quality_80_60_61.68M_2.0 type: float & quantized board: GPU download link: https://www.xilinx.com/bin/public/openDownload?filename=cf_face-quality_80_60_61.68M_2.0.zip checksum: 9b8e6190c472f08d7c9692aa96976e32 - name: face-quality type: xmodel board: zcu102 & zcu104 & kv260 download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality-zcu102_zcu104_kv260-r2.0.0.tar.gz checksum: 3eee919ce2b111e381cac760c3c0caa8 - name: face-quality type: xmodel board: vck190 download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality-vck190-r2.0.0.tar.gz checksum: d39096bc1c63966f8799287d194d1c66 - name: face-quality type: xmodel board: vck50006pe-DPUCVDX8H-DWC download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz checksum: 00800aea9794d4ca39251407e7f6d018 - name: face-quality type: xmodel board: vck50008pe-DPUCVDX8H download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz checksum: 1fc5d4e9e8931008528108269dc73538 - name: face-quality type: xmodel board: u50lv-DPUCAHX8H download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality-u50lv-DPUCAHX8H-r2.0.0.tar.gz checksum: 11ed4c448cca1bb674185086f7f013db - name: face-quality type: xmodel board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz checksum: fac3169c2b4209a612137a33e98309c1 license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
<reponame>PrincetonUniversity/prga context: ../../../../fpga/magic/grady18v2_N10_mem32Kb_42x34/ctx.pkl compiler: vcs app: name: picorv32_axi sources: - ../../src/picorv32.v parameters: COMPRESSED_ISA: 1 ENABLE_MUL: 1 ENABLE_FAST_MUL: 1 ENABLE_DIV: 1 ENABLE_IRQ: 1 ENABLE_TRACE: 1 constraints: io: io.partial tests: picorv32_test_basic: sources: - ../../src/picorv32_test_basic.v run_flags: - +firmware=${PRGA_ROOT}/examples/app/picorv32/src/firmware.hex - +max_cycle=1000000
--- algorithm: class: MuLambda comma_or_plus: comma population_size: 500 lambda_size: 600 elite_size: 10 probabilities: crossover: 0.9 mutation: 0.01 injection: 0.9 termination: max_steps: 2000 on_individual: :stopping_condition init: method: ramped # grow or full or random or ramped random_length: 8 sensible_depth: 4 inject: # alternative to a crossover or copy method: full # grow or full or random random_length: 8 sensible_depth: 3 grammar: class: Abnf::File filename: sample/toy_regression/grammar.abnf mapper: class: DepthLocus wraps_to_fail: 3 selection: class: RankSampling #Truncation selection_rank: #optional class: Ranking # min: 0.5 # max: 1.5 elite_rank: class: Ranking crossover: class: CrossoverRipple margin: 2 #1 step: 2 mutation: class: MutationRipple store: class: Store filename: ./toy_mu_comma_lambda.store report: class: ToyReport require: sample/toy_regression/toy_report.rb individual: class: ToyIndividualSingleObjective require: sample/toy_regression/toy_individual.rb shorten_chromozome: true
<reponame>bluetiger9/Vitis-AI # Copyright 2019 Xilinx Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. description: CenterPoint on Astyx 4D radar data. input size: 2560*40*4 float ops: 54G task: 3d detection framework: pytorch prune: 'no' version: 1.4 files: - name: pt_centerpoint_astyx_2560_40_54G_1.4 type: float & quantized board: GPU download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_centerpoint_astyx_2560_40_54G_1.4.zip checksum: b7a7fccd067b11cbae41b3a11fef8211 - name: centerpoint_0_pt type: xmodel board: zcu102 & zcu104 & kv260 download link: https://www.xilinx.com/bin/public/openDownload?filename=centerpoint_0_pt-zcu102_zcu104_kv260-r1.4.1.tar.gz checksum: 92456422d38c36b0f08bdf666b176211 - name: centerpoint_1_pt type: xmodel board: zcu102 & zcu104 & kv260 download link: https://www.xilinx.com/bin/public/openDownload?filename=centerpoint_1_pt-zcu102_zcu104_kv260-r1.4.1.tar.gz checksum: 0d123bee47d89c5f5237cd14c26a2095 - name: centerpoint_0_pt type: xmodel board: vck190 download link: https://www.xilinx.com/bin/public/openDownload?filename=centerpoint_0_pt-vck190-r1.4.1.tar.gz checksum: d769b2bc3599792cab8072d7a21676c6 - name: centerpoint_1_pt type: xmodel board: vck190 download link: https://www.xilinx.com/bin/public/openDownload?filename=centerpoint_1_pt-vck190-r1.4.1.tar.gz checksum: 4e7540a11f3e37667ecc7a3fb1e1cae5 license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
main: - title: GitHub url: https://github.com/PrincetonUniversity/prga - title: Documentation url: https://prga.readthedocs.io - title: Publications url: /publications.html
<filename>.github/workflows/test.yml name: Dynamic Scheduler Tests on: push jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 with: submodules: 'recursive' fetch-depth: 0 clean: false - name: Install dependencies run: | sudo apt-get update sudo apt-get install libfl-dev git sudo apt-get install gcc-11 g++-11 sudo apt-get install python3-pip python3-setuptools sudo pip3 install robotframework==3.2.2 robotframework-pabot - name: Build Verilator env: CC: gcc-11 CXX: g++-11 run: | cd verilator git show autoconf ./configure make -j$(nproc) sudo make install - name: Verilator version run: | verilator --version - name: Build vcddiff run: | git clone https://github.com/veripool/vcddiff make -C vcddiff sudo cp -p vcddiff/vcddiff /usr/local/bin - name: Generate Robot Tests env: CC: gcc-11 CXX: g++-11 run: ./gen_robot.py - name: Run Robot Tests env: CC: gcc-11 CXX: g++-11 run: pabot --processes $(($(nproc) + 1)) --verbose --testlevelsplit --noncritical should_fail --noncritical dist --noncritical opt --noncritical perf --noncritical file_grep --noncritical code_gen robot_tests || true - name: Show Diff In Results run: | git fetch origin gh-pages ./compare_robot_logs.py <(git show origin/gh-pages:output.xml) output.xml | tee diff.log - name: Upload Artifacts uses: actions/upload-artifact@v2 with: name: robot_logs path: | report.html output.xml log.html diff.log - name: Deploy if: github.ref == 'refs/heads/master' run: | mkdir deploy mv report.html output.xml log.html diff.log deploy touch deploy/.nojekyll echo '<head> <meta http-equiv="refresh" content="0; url=report.html" /> </head>' > deploy/index.html cd deploy git init cp ../.git/config ./.git/config git add . git config --local user.email "push@gha" git config --local user.name "GHA" git commit -am "update ${{ github.sha }}" git push -u origin +HEAD:gh-pages rm -rf .git
--- charIo: - name: char direction: output width: 8 - name: char_output_ready direction: input width: 8 peToNetworkIo: - name: message_out_ready direction: input width: 8 - name: x_coord direction: output width: 8 - name: y_coord direction: output width: 8 - name: multicast_group direction: output width: 8 - name: ready_flag direction: output width: 8 - name: result_flag direction: output width: 8 - name: matrix_type direction: output width: 8 - name: matrix_x_coord direction: output width: 8 - name: matrix_y_coord direction: output width: 8 - name: matrix_element direction: output width: 32 - name: packet_complete direction: output width: 8 ledIo: - name: led direction: output width: 8 networkToPeIo: - name: message_valid direction: input width: 8 - name: message_in_available direction: input width: 8 - name: multicast_group direction: input width: 8 - name: ready_flag direction: input width: 8 - name: result_flag direction: input width: 8 - name: matrix_type direction: input width: 8 - name: matrix_x_coord direction: input width: 8 - name: matrix_y_coord direction: input width: 8 - name: matrix_element direction: input width: 32 - name: message_read direction: output width: 8 nodeIo: - name: x_coord direction: input width: 8 - name: y_coord direction: input width: 8 - name: node_number direction: input width: 8 - name: matrix_x_offset direction: input width: 8 - name: matrix_y_offset direction: input width: 8 - name: matrix_init_from_file direction: input width: 8 matrixIo: - name: matrix direction: output width: 32 - name: matrix_end_row direction: output width: 8 - name: matrix_end direction: output width: 8 - name: fox_matrix_size direction: input width: 32 networkIo: - name: fox_network_stages direction: input width: 8 - name: result_x_coord direction: input width: 8 - name: result_y_coord direction: input width: 8 - name: rom_x_coord direction: input width: 8 - name: rom_y_coord direction: input width: 8
language: c compiler: clang before_script: cd code script: - make clean - make - ./test
<gh_stars>1-10 package: name: cluster_interconnect dependencies: common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.22.1 } workspace: checkout_dir: "./deps" export_include_dirs: - rtl/low_latency_interco - rtl/peripheral_interco sources: # Source files grouped in levels. Files in level 0 have no dependencies on files in this # package. Files in level 1 only depend on files in level 0, files in level 2 on files in # levels 1 and 0, etc. Files within a level are ordered alphabetically. # Level 0 - rtl/tcdm_interconnect/tcdm_interconnect_pkg.sv - rtl/tcdm_interconnect/addr_dec_resp_mux.sv - rtl/tcdm_interconnect/amo_shim.sv - rtl/variable_latency_interconnect/addr_decoder.sv # Level 1 - rtl/tcdm_interconnect/xbar.sv - rtl/variable_latency_interconnect/simplex_xbar.sv # Level 2 - rtl/tcdm_interconnect/clos_net.sv - rtl/tcdm_interconnect/bfly_net.sv - rtl/variable_latency_interconnect/full_duplex_xbar.sv # Level 3 - rtl/tcdm_interconnect/tcdm_interconnect.sv - rtl/variable_latency_interconnect/variable_latency_bfly_net.sv # Level 4 - rtl/variable_latency_interconnect/variable_latency_interconnect.sv # Low-Latency Interco - rtl/low_latency_interco/FanInPrimitive_Req.sv - rtl/low_latency_interco/ArbitrationTree.sv - rtl/low_latency_interco/MUX2_REQ.sv - rtl/low_latency_interco/AddressDecoder_Resp.sv - rtl/low_latency_interco/TestAndSet.sv - rtl/low_latency_interco/RequestBlock2CH.sv - rtl/low_latency_interco/RequestBlock1CH.sv - rtl/low_latency_interco/FanInPrimitive_Resp.sv - rtl/low_latency_interco/ResponseTree.sv - rtl/low_latency_interco/ResponseBlock.sv - rtl/low_latency_interco/AddressDecoder_Req.sv - rtl/low_latency_interco/XBAR_TCDM.sv - rtl/low_latency_interco/XBAR_TCDM_WRAPPER.sv - rtl/low_latency_interco/TCDM_PIPE_REQ.sv - rtl/low_latency_interco/TCDM_PIPE_RESP.sv - rtl/low_latency_interco/grant_mask.sv - rtl/low_latency_interco/priority_Flag_Req.sv # Peripheral Interco - rtl/peripheral_interco/AddressDecoder_PE_Req.sv - rtl/peripheral_interco/AddressDecoder_Resp_PE.sv - rtl/peripheral_interco/ArbitrationTree_PE.sv - rtl/peripheral_interco/FanInPrimitive_Req_PE.sv - rtl/peripheral_interco/RR_Flag_Req_PE.sv - rtl/peripheral_interco/MUX2_REQ_PE.sv - rtl/peripheral_interco/FanInPrimitive_PE_Resp.sv - rtl/peripheral_interco/RequestBlock1CH_PE.sv - rtl/peripheral_interco/RequestBlock2CH_PE.sv - rtl/peripheral_interco/ResponseBlock_PE.sv - rtl/peripheral_interco/ResponseTree_PE.sv - rtl/peripheral_interco/XBAR_PE.sv - target: tcdm_test include_dirs: - tb/common/ - tb/tb_tcdm_interconnect/hdl files: # Level 0 - tb/tb_tcdm_interconnect/hdl/tb_pkg.sv # Level 2 - tb/tb_tcdm_interconnect/hdl/tcdm_interconnect_wrap.sv # Level 2 - tb/tb_tcdm_interconnect/hdl/tb.sv - target: variable_latency_test include_dirs: - tb/common/ - tb/tb_variable_latency_interconnect/hdl files: # Level 0 - tb/tb_variable_latency_interconnect/hdl/tb_pkg.sv # Level 2 - tb/tb_variable_latency_interconnect/hdl/variable_latency_interconnect_wrap.sv # Level 2 - tb/tb_variable_latency_interconnect/hdl/variable_latency_interconnect_tb.sv
<filename>ultra96/ROOT_FS/app/fad/data/ConstraintGenerator/TrafficLightDetector/param.yaml
%YAML 1.2
---
TrafficLight:
  opencv_tm_method: 5 # CV_TM_SQDIFF = 0, CV_TM_SQDIFF_NORMED = 1, CV_TM_CCORR = 2, CV_TM_CCORR_NORMED = 3, CV_TM_CCOEFF = 4, CV_TM_CCOEFF_NORMED = 5
  red_tl_haar_like_cascade_file_path: "/data/ConstraintGenerator/TrafficLightDetector/HaarLike/red_tl_cascade.xml"
  y_of_top_of_detection_area: 40     # lower bound in the y direction of the image region used for traffic light detection
  y_of_bottom_of_detection_area: 220 # upper bound in the y direction of the image region used for traffic light detection
  x_of_left_of_detection_area: 200   # lower bound in the x direction of the image region used for traffic light detection
  x_of_right_of_detection_area: 440  # upper bound in the x direction of the image region used for traffic light detection
  tm_max_val_thr: 0.675              # threshold on the template-matching output value for a match to be considered valid
  region_width_ratio_with_tm: 2.5    # a detection is valid if the width ratio between the Haar-like detection region and the template image is within 1.0 +/- this value (horizontal direction)
  region_height_ratio_with_tm: 5.0   # a detection is valid if the height ratio between the Haar-like detection region and the template image is within 1.0 +/- this value (vertical direction)
  haar_like_scale_factor: 1.01
  haar_like_min_neighbors: 1

Templates:
  num: 10
  template0:
    distance: 0.6
    file_path: "/data/ConstraintGenerator/TrafficLightDetector/template/red_60cm.png"
  template1:
    distance: 0.7
    file_path: "/data/ConstraintGenerator/TrafficLightDetector/template/red_70cm.png"
  template2:
    distance: 0.8
    file_path: "/data/ConstraintGenerator/TrafficLightDetector/template/red_80cm.png"
  template3:
    distance: 0.9
    file_path: "/data/ConstraintGenerator/TrafficLightDetector/template/red_90cm.png"
  template4:
    distance: 1.0
    file_path: "/data/ConstraintGenerator/TrafficLightDetector/template/red_100cm.png"
  template5:
    distance: 1.1
    file_path: "/data/ConstraintGenerator/TrafficLightDetector/template/red_110cm.png"
  template6:
    distance: 1.2
    file_path: "/data/ConstraintGenerator/TrafficLightDetector/template/red_120cm.png"
  template7:
    distance: 1.3
    file_path: "/data/ConstraintGenerator/TrafficLightDetector/template/red_130cm.png"
  template8:
    distance: 1.4
    file_path: "/data/ConstraintGenerator/TrafficLightDetector/template/red_140cm.png"
  template9:
    distance: 1.5
    file_path: "/data/ConstraintGenerator/TrafficLightDetector/template/red_150cm.png"
<filename>models/AI-Model-Zoo/model-list/pt_person-orientation_224_112_558M_2.0/model.yaml
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

description: Pedestrian's orientation classifier
input size: 224*112
float ops: 558M
task: classification
framework: pytorch
prune: 'no'
version: 2.0
files:
- name: pt_person-orientation_224_112_558M_2.0
  type: float & quantized
  board: GPU
  download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_person-orientation_224_112_558M_2.0.zip
  checksum: 481d57ad14b578ba299ef77d8bc2b56a
- name: person-orientation_pruned_558m_pt
  type: xmodel
  board: zcu102 & zcu104 & kv260
  download link: https://www.xilinx.com/bin/public/openDownload?filename=person-orientation_pruned_558m_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz
  checksum: 235b3f49e2dbeb60e3f8936fd2582e82
- name: person-orientation_pruned_558m_pt
  type: xmodel
  board: vck190
  download link: https://www.xilinx.com/bin/public/openDownload?filename=person-orientation_pruned_558m_pt-vck190-r2.0.0.tar.gz
  checksum: f29ff11a67411b0d4f82cfa4bff8431f
- name: person-orientation_pruned_558m_pt
  type: xmodel
  board: vck50006pe-DPUCVDX8H-DWC
  download link: https://www.xilinx.com/bin/public/openDownload?filename=person-orientation_pruned_558m_pt-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
  checksum: fb2e8ee27b1cec9b6bc0fedf59020b5d
- name: person-orientation_pruned_558m_pt
  type: xmodel
  board: u50lv-DPUCAHX8H
  download link: https://www.xilinx.com/bin/public/openDownload?filename=person-orientation_pruned_558m_pt-u50lv-DPUCAHX8H-r2.0.0.tar.gz
  checksum: 821be3d07acbf06d3d69b132dfdc8ee2
- name: person-orientation_pruned_558m_pt
  type: xmodel
  board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
  download link: https://www.xilinx.com/bin/public/openDownload?filename=person-orientation_pruned_558m_pt-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
  checksum: 184c5f99f91f8cc1a03d37a66cab6d74
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
<reponame>lavanyajagan/cocotb<filename>.github/workflows/lint.yml # Copyright cocotb contributors # Licensed under the Revised BSD License, see LICENSE for details. # SPDX-License-Identifier: BSD-3-Clause name: Lint on: pull_request: branches: - master - 'stable/**' jobs: lint-pre-commit: runs-on: ubuntu-latest name: pre-commit steps: - uses: actions/checkout@v3 - name: Set up Python uses: actions/setup-python@v3 with: python-version: "3.10" - name: pre-commit run: | pip install pre-commit pre-commit run -a
hwpe-stream: vlog_opts: [ +nowarnSVCHK, ] incdirs: [ rtl, ] files: [ rtl/hwpe_stream_package.sv, rtl/hwpe_stream_interfaces.sv, rtl/basic/hwpe_stream_assign.sv, rtl/basic/hwpe_stream_mux_static.sv, rtl/basic/hwpe_stream_demux_static.sv, rtl/basic/hwpe_stream_buffer.sv, rtl/basic/hwpe_stream_merge.sv, rtl/basic/hwpe_stream_fence.sv, rtl/basic/hwpe_stream_split.sv, rtl/basic/hwpe_stream_serialize.sv, rtl/basic/hwpe_stream_deserialize.sv, rtl/fifo/hwpe_stream_fifo_earlystall_sidech.sv, rtl/fifo/hwpe_stream_fifo_earlystall.sv, rtl/fifo/hwpe_stream_fifo_scm.sv, rtl/fifo/hwpe_stream_fifo_scm_test_wrap.sv, rtl/fifo/hwpe_stream_fifo_sidech.sv, rtl/fifo/hwpe_stream_fifo.sv, rtl/fifo/hwpe_stream_fifo_ctrl.sv, rtl/streamer/hwpe_stream_addressgen.sv, rtl/streamer/hwpe_stream_addressgen_v2.sv, rtl/streamer/hwpe_stream_addressgen_v3.sv, rtl/streamer/hwpe_stream_strbgen.sv, rtl/streamer/hwpe_stream_sink.sv, rtl/streamer/hwpe_stream_sink_realign.sv, rtl/streamer/hwpe_stream_source.sv, rtl/streamer/hwpe_stream_source_realign.sv, rtl/streamer/hwpe_stream_streamer_queue.sv, rtl/tcdm/hwpe_stream_tcdm_fifo_load.sv, rtl/tcdm/hwpe_stream_tcdm_fifo_load_sidech.sv, rtl/tcdm/hwpe_stream_tcdm_fifo_store.sv, rtl/tcdm/hwpe_stream_tcdm_fifo.sv, rtl/tcdm/hwpe_stream_tcdm_assign.sv, rtl/tcdm/hwpe_stream_tcdm_mux.sv, rtl/tcdm/hwpe_stream_tcdm_mux_static.sv, rtl/tcdm/hwpe_stream_tcdm_reorder.sv, rtl/tcdm/hwpe_stream_tcdm_reorder_static.sv, ] tb_hwpe_stream: flags: [ skip_synthesis, ] files: [ tb/tb_hwpe_stream_reservoir.sv, tb/tb_hwpe_stream_receiver.sv, ] tb_hwpe_stream_local: flags: [ only_local, skip_synthesis, ] files: [ tb/tb_hwpe_stream_sink_realign.sv, tb/tb_hwpe_stream_source_realign.sv, tb/tb_hwpe_stream_source_realign_decoupled.sv, ]