Search is not available for this dataset
content
stringlengths
0
376M
# @package _global_ do_blink: backend: vivado figure: fig1 sub_figure: b device: 3eg part: xczu3eg-sbva484-2-e num_luts: 2400 bft: bft8 use_abs: False
<gh_stars>10-100 pulp_soc: incdirs: [ ../includes, ] files: [ soc_interconnect.sv, boot_rom.sv, l2_ram_multi_bank.sv, lint_jtag_wrap.sv, periph_bus_wrap.sv, soc_clk_rst_gen.sv, soc_event_arbiter.sv, soc_event_generator.sv, soc_event_queue.sv, soc_interconnect_wrap.sv, soc_peripherals.sv, pulp_soc.sv, ]
namespace: common-ips depends: third-party: - pulp-common: repo: "https://github.com/pulp-platform/common_cells.git" branch: v1.23.0 ips: - fifo_v3: path: third-party/pulp-common/fifo_v3.sv
# Adapted from Garnet and ButterPHY name: qtm commands: - | mkdir -p build mkdir -p outputs cd build # build analog_core QTM mkdir -p analog_core cd analog_core pt_shell -f ../../analog_core.qtm.tcl cd .. # build input_buffer QTM mkdir -p input_buffer cd input_buffer pt_shell -f ../../input_buffer.qtm.tcl cd .. # build output_buffer QTM mkdir -p output_buffer cd output_buffer pt_shell -f ../../output_buffer.qtm.tcl cd .. # build mdll_r1_top QTM mkdir -p mdll_r1_top cd mdll_r1_top pt_shell -f ../../mdll_r1_top.qtm.tcl cd .. # build PI (for TX) QTM mkdir -p phase_interpolator cd phase_interpolator pt_shell -f ../../phase_interpolator.qtm.tcl cd .. # build input divider (for TX) QTM mkdir -p input_divider cd input_divider pt_shell -f ../../input_divider.qtm.tcl cd .. # build termination (for TX) QTM mkdir -p termination cd termination pt_shell -f ../../termination.qtm.tcl cd .. # link build products into output folder cd ../outputs ln -s ../build/analog_core/analog_core_lib.db analog_core_lib.db ln -s ../build/analog_core/analog_core.lib analog_core.lib ln -s ../build/input_buffer/input_buffer_lib.db input_buffer_lib.db ln -s ../build/input_buffer/input_buffer.lib input_buffer.lib ln -s ../build/output_buffer/output_buffer_lib.db output_buffer_lib.db ln -s ../build/output_buffer/output_buffer.lib output_buffer.lib ln -s ../build/mdll_r1_top/mdll_r1_top_lib.db mdll_r1_top_lib.db ln -s ../build/mdll_r1_top/mdll_r1_top.lib mdll_r1_top.lib ln -s ../build/phase_interpolator/phase_interpolator_lib.db phase_interpolator_lib.db ln -s ../build/phase_interpolator/phase_interpolator.lib phase_interpolator.lib ln -s ../build/input_divider/input_divider_lib.db input_divider_lib.db ln -s ../build/input_divider/input_divider.lib input_divider.lib ln -s ../build/termination/termination_lib.db termination_lib.db ln -s ../build/termination/termination.lib termination.lib inputs: - adk outputs: - analog_core_lib.db - analog_core.lib - input_buffer_lib.db - input_buffer.lib - 
output_buffer_lib.db - output_buffer.lib - mdll_r1_top_lib.db - mdll_r1_top.lib - phase_interpolator_lib.db - phase_interpolator.lib - input_divider_lib.db - input_divider.lib - termination_lib.db - termination.lib parameters: # Name of the technology library containing ADK_DRIVING_CELL qtm_tech_lib: tcbn16ffcllbwp16p90tt0p8v25c # Period of the main clock in nanoseconds # (will be scaled by constr_time_scale) constr_main_per: 0.7 # Scale factor for timing constraints constr_time_scale: 1.0
<reponame>dries007/FPGAPerformanceSuite --- # YAML Front Matter. All of this is available in the latex template. title: FPGA Benchmarking # subtitle: '**DRAFT** Compiled \DTMnow' institute: KULeuven, Faculty of Engineering Technology, Campus De Nayer, Sint-katelijne-waver opleiding: Elektronica-ICT afdeling: Afstudeerrichting ICT author: - <NAME> promotor: - type: 'Promotor:' name: dr. ing. <NAME> - type: 'Co-promotoren:' name: ir. <NAME> language: english date: Academiejaar 2018-2019 keywords: - FPGA Benchmarking - Resource estimation # LaTex template variables, KULeuven style guides fontsize: 11pt linestretch: 1.25 # Font # fontfamily: arev # fontfamily: merriweather # fontfamilyoptions: sfdefault papersize: a4 pagestyle: fancy classoption: - twoside - openright - table documentclass: report margin-left: 3.5cm margin-right: 2.5cm margin-top: 3.5cm margin-bottom: 3.5cm lof: false # List of Figures lot: false # List of Tables toc: false # Table of Contents glossary: false # List of Symbols and Abbreviations listings: true caption: true codeBlockCaptions: true lang: en # For babel numbersections: true secnumdepth: 3 # Manually added in a separate file. # This is required to preserve the ordering of the bibliography before the appendix. bibliography: bibliography.bib suppress-bibliography: true biblio-style: plainnat cite-style: round natbib: true link-citations: true # Show all references even if not used in the text. # nocite: '*' figPrefix: - "figure" - "figures" tblPrefix: - "table" - "tables" eqnPrefix: - "equation" - "equations" lstPrefix: - "listing" - "listings" secPrefix: - "section" - "sections" pairDelim: "and" lastDelim: ", and" linkReferences: true nameInLink: true # Preamble pages # -------------- copyright: | © Copyright KU Leuven Without written permission of the supervisor(s) and the author(s) it is forbidden to reproduce or adapt in any form or by any means any part of this publication. 
Requests for obtaining the right to reproduce or utilise parts of this publication should be addressed to KU Leuven, Technology Campus De Nayer, <NAME>laan 5, B-2860 Sint-Katelijne-Waver, +32 15 31 69 44 or via e-mail <EMAIL>. A written permission of the supervisor(s) is also required to use the methods, products, schematics and programs described in this work for industrial or commercial use, and for submitting this publication in scientific contests.
<filename>src_files.yml ape_core: vlog_opts: [ # -L common_cells_lib ] incdirs: - include # - ../../common_cells/include files: # Source files grouped in levels. Files in level 0 have no dependencies on files in this # package. Files in level 1 only depend on files in level 0, files in level 2 on files in # levels 1 and 0, etc. Files within a level are ordered alphabetically. # Level 0 - src/ape_pkg.sv # Level 1 - src/ape_core.sv # Level 2 ape_core_sim: files: - test/ape_test.sv flags: - skip_synthesis - only_local
package: name: icache_mp_128_pf dependencies: axi_slice: { git: "https://github.com/pulp-platform/axi_slice.git", version: 1.1.3 } common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.10.0 } icache-intc: { git: "https://github.com/pulp-platform/icache-intc.git", version: 1.0.0 } scm: { git: "https://github.com/pulp-platform/scm.git", version: 1.0.0 } sources: # Level 0 - RTL/icache_bank_mp_128.sv - RTL/icache_bank_mp_PF.sv - RTL/merge_refill_cam_128_16.sv - RTL/pf_miss_mux.sv - RTL/prefetcher_if.sv # Level 1 - RTL/central_controller_128.sv # Level 2 - RTL/cache_controller_to_axi_128_PF.sv # Level 3 - RTL/icache_top_mp_128_PF.sv
<gh_stars>0 riscv: incdirs: [ include, ../../rtl/includes ] files: [ include/apu_core_package.sv, include/riscv_defines.sv, include/riscv_tracer_defines.sv, riscv_alu.sv, riscv_alu_basic.sv, riscv_alu_div.sv, riscv_compressed_decoder.sv, riscv_controller.sv, riscv_cs_registers.sv, riscv_debug_unit.sv, riscv_decoder.sv, riscv_int_controller.sv, riscv_ex_stage.sv, riscv_hwloop_controller.sv, riscv_hwloop_regs.sv, riscv_id_stage.sv, riscv_if_stage.sv, riscv_load_store_unit.sv, riscv_mult.sv, riscv_prefetch_buffer.sv, riscv_prefetch_L0_buffer.sv, riscv_core.sv, riscv_apu_disp.sv, riscv_fetch_fifo.sv, riscv_L0_buffer.sv, ] riscv_vip_rtl: targets: [ rtl, ] incdirs: [ include, ] files: [ riscv_tracer.sv, riscv_simchecker.sv, ] riscv_regfile_rtl: targets: [ rtl, tsmc55, gf22, ] incdirs: [ include, ] files: [ riscv_register_file_latch.sv, ] riscv_regfile_verilator: targets: [ verilator, ] files: [ riscv_register_file.sv, ] riscv_regfile_fpga: targets: [ xilinx, ] incdirs: [ include, ] files: [ riscv_register_file.sv, ]
<filename>quickfeather/source/qorc-sdk/TinyFPGA-Programmer-Application/b-series/.travis.yml sudo: false language: python python: - 2.7 - 3.6 install: - pip install tox-travis script: - cd programmer && tox
metadata: name: mmio entity: bus-flatten: yes bus-prefix: mmio_ clock-name: kcd_clk reset-name: kcd_reset features: bus-width: 64 optimize: yes interface: flatten: yes fields: - address: 0b0--- name: AFU_DHF behavior: constant value: 17293826967149215744 # [63:60]: 1 && [40]: 1 - address: 0b1--- name: AFU_ID_L behavior: constant value: 13797985263751972578 # check sum.json - address: 0b10--- name: AFU_ID_H behavior: constant value: 13609688667197753651 # check sum.json - address: 0b11--- name: DFH_RSVD0 behavior: constant value: 0 - address: 0b100--- name: DFH_RSVD1 behavior: constant value: 0 - address: 64 name: start doc: Start the kernel. bitrange: 0 behavior: strobe - address: 64 name: stop doc: Stop the kernel. bitrange: 1 behavior: strobe - address: 64 name: reset doc: Reset the kernel. bitrange: 2 behavior: strobe - address: 68 name: idle doc: Kernel idle status. bitrange: 32 behavior: status - address: 68 name: busy doc: Kernel busy status. bitrange: 33 behavior: status - address: 68 name: done doc: Kernel done status. bitrange: 34 behavior: status - address: 72 name: result doc: Result. bitrange: 63..0 behavior: status - address: 80 name: ExampleBatch_firstidx doc: ExampleBatch first index. bitrange: 31..0 behavior: control - address: 84 name: ExampleBatch_lastidx doc: ExampleBatch last index (exclusive). bitrange: 63..32 behavior: control - address: 88 name: ExampleBatch_number_values doc: Buffer address for ExampleBatch number_values bitrange: 63..0 behavior: control - address: 96 name: Profile_enable doc: Activates profiler counting when this bit is high. bitrange: 0 behavior: control - address: 100 name: Profile_clear doc: Resets profiler counters when this bit is asserted. bitrange: 32 behavior: strobe
<reponame>CansWang/open-source-phy_SKY130 name: inject_dont_touch commands: - | mkdir -p outputs cat inputs/design.sdc dont_touch.tcl > outputs/design.sdc inputs: - design.sdc outputs: - design.sdc
<filename>ips_list.yml zero-riscy: commit: tags/pulpissimo-v1.0 group: pulp-platform hwpe-ctrl: commit: v1.5 group: pulp-platform hwpe-stream: commit: master group: pulp-platform tech_cells_generic: commit: v0.1.6 group: pulp-platform hci: commit: master group: pulp-platform scm: commit : afd3240 group: pulp-platform
name: atomsim-hydrogensoc on: push: branches: [ main ] pull_request: branches: [ main ] # Allows you to run this workflow manually from the Actions tab workflow_dispatch: jobs: build: name: atomsim-hydrogensoc runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v2 with: path: riscv-atom - name: Install Verilator run: sudo apt-get install verilator - name: Set RVATOM directory run: echo "RVATOM=$GITHUB_WORKSPACE/riscv-atom" >> $GITHUB_ENV - name: Build AtomSim for Hydrogensoc Target run: cd riscv-atom && pwd && make Target=hydrogensoc sim
image: centos:7 stages: - build - test - review - staging - production - cleanup variables: EURYSPACE_CI_SERVER: sources.euryecetelecom.com EURYSPACE_DOMAIN: euryspace.org build: stage: build image: docker:git services: - docker:dind variables: DOCKER_DRIVER: overlay2 script: - setup_docker test: stage: test image: gliderlabs/herokuish:latest before_script: [] script: - echo run tests only: - branches codequality: image: docker:latest variables: DOCKER_DRIVER: overlay2 allow_failure: true services: - docker:dind script: - setup_docker - codeclimate artifacts: paths: [codeclimate.json] .deploy: &deploy script: - echo deploy review: <<: *deploy stage: review variables: APP: review-$CI_COMMIT_REF_NAME APP_HOST: $CI_PROJECT_NAME-$CI_ENVIRONMENT_SLUG.$EURYSPACE_DOMAIN script: - echo review environment: name: review/$CI_COMMIT_REF_NAME url: https://$CI_PROJECT_NAME-$CI_ENVIRONMENT_SLUG.$EURYSPACE_DOMAIN on_stop: stop-review only: - branches except: - master stop-review: <<: *deploy stage: cleanup script: - echo cleanup when: manual variables: APP: review-$CI_COMMIT_REF_NAME GIT_STRATEGY: none environment: name: review/$CI_COMMIT_REF_NAME action: stop only: - branches except: - master staging: <<: *deploy stage: staging script: - echo staging variables: APP: staging APP_HOST: $CI_PROJECT_NAME-staging.$EURYSPACE_DOMAIN environment: name: staging url: https://$CI_PROJECT_NAME-staging.$EURYSPACE_DOMAIN only: - master production: <<: *deploy stage: production script: - echo production variables: APP: production APP_HOST: $CI_PROJECT_NAME.$EURYSPACE_DOMAIN when: manual environment: name: production url: https://$CI_PROJECT_NAME.$EURYSPACE_DOMAIN only: - master
<filename>testing/memcached/memcached_submission.yml<gh_stars>10-100 description: >- Updated for post-SIGCOMM submission. Contains data for booster and no booster with a zipf coefficient of 1.00, and key space of 10,000. experiment: memcached repositories: P4Boosters: 036cd6d9 files: documentation.md: Memcached.md data: data/ analysis.ipynb: analysis/TofinoMemcached.ipynb shremote_config.yml: execution/cfgs/tofino_moongen.yml bitstream.tar.gz: /home/iped/MemcachedFPGA.tar.gz
<reponame>StanfordVLSI/dragonphy2 dt: 0.1e-6 func_order: 1 func_numel: 512 func_domain: [0.0, 1.0e-9] func_widths: [18, 18] func_exps: [-16, -16] chunk_width: 8 num_chunks: 4 slices_per_bank: 4 num_banks: 4 pi_ctl_width: 9 vref_rx: 0.3 vref_tx: 0.3 n_adc: 8 freq_tx: 16.0e+9 freq_rx: 4.0e+9 use_jitter: true use_noise: true
"7020": arch: "zynq7_z020" device_family: "xc7z020" device_name: "fig1" device_speed: "clg400-1" device: "xc7z020-fig1-roi-virt" board: "pynqz1" timeout: 200
# Copyright 2019 Xilinx Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. description: refinedet pedestrian detector. input size: 360*480 float ops: 5.08G task: detection framework: caffe prune: '0.96' version: 2.0 files: - name: cf_refinedet_coco_360_480_0.96_5.08G_2.0 type: float & quantized board: GPU download link: https://www.xilinx.com/bin/public/openDownload?filename=cf_refinedet_coco_360_480_0.96_5.08G_2.0.zip checksum: 5c6518c2b36486f7045b28a3312dd1aa - name: refinedet_pruned_0_96 type: xmodel board: zcu102 & zcu104 & kv260 download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_pruned_0_96-zcu102_zcu104_kv260-r2.0.0.tar.gz checksum: 0059ed8a29cee180d510c026e133ac82 - name: refinedet_pruned_0_96 type: xmodel board: vck190 download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_pruned_0_96-vck190-r2.0.0.tar.gz checksum: 94d35a7e0fafe03365989917973f149f - name: refinedet_pruned_0_96 type: xmodel board: vck50006pe-DPUCVDX8H-DWC download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_pruned_0_96-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz checksum: 1b50d4f3fd7bc1da5a1c8f05dc8481c8 - name: refinedet_pruned_0_96 type: xmodel board: vck50008pe-DPUCVDX8H download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_pruned_0_96-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz checksum: 925ebd11b33667a6d198767ae162a544 - name: refinedet_pruned_0_96 type: xmodel board: u50lv-DPUCAHX8H download link: 
https://www.xilinx.com/bin/public/openDownload?filename=refinedet_pruned_0_96-u50lv-DPUCAHX8H-r2.0.0.tar.gz checksum: 256100719d8f31295a4a00ed4fbca86f - name: refinedet_pruned_0_96 type: xmodel board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_pruned_0_96-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz checksum: 3065b4a29bc47d7b615e3c8ad51c9f39 license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
name: manual-wf on: [workflow_dispatch] jobs: ls-recursive: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - run: ls -R - run: echo Hi There
<reponame>asb/opentitan name: CI Checks on: push: branches: ["**"] pull_request: branches: [main] workflow_dispatch: jobs: spell-check: runs-on: ubuntu-latest steps: - name: Checkout Parent Repo uses: actions/checkout@v2 with: ref: main repository: aws/aws-iot-device-sdk-embedded-C path: main - name: Clone This Repo uses: actions/checkout@v2 with: path: ./kernel - name: Install spell run: | sudo apt-get install spell sudo apt-get install util-linux - name: Check spelling run: | PATH=$PATH:main/tools/spell # Make sure that the portable directory is not included in the spellcheck. sed -i 's/find $DIRNAME/find $DIRNAME -not -path '*portable*'/g' main/tools/spell/find-unknown-comment-words find-unknown-comment-words --directory kernel/ --lexicon ./kernel/.github/lexicon.txt if [ "$?" = "0" ]; then exit 0 else exit 1 fi formatting: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Install Uncrustify run: sudo apt-get install uncrustify - name: Run Uncrustify run: | uncrustify --version find . \( -name portable \) -prune -false -o -iname "*.[hc]" -exec uncrustify --check -c .github/uncrustify.cfg {} + - name: Check For Trailing Whitespace run: | set +e grep --exclude="README.md" --exclude-dir="portable" -rnI -e "[[:blank:]]$" . if [ "$?" = "0" ]; then echo "Files have trailing whitespace." exit 1 else exit 0 fi url-check: runs-on: ubuntu-latest steps: - name: Clone This Repo uses: actions/checkout@v2 with: path: ./kernel - name: URL Checker run: | bash kernel/.github/actions/url_verifier.sh kernel
<reponame>StanfordAHA/garnet<filename>mflowgen/common/rtl/configure.yml name: rtl commands: - bash gen_rtl.sh outputs: - design.v parameters: array_width: 4 array_height: 2 glb_tile_mem_size: 256 num_glb_tiles: 2 pipeline_config_interval: 8 interconnect_only: False glb_only: False soc_only: False PWR_AWARE: True # If true, use docker container for python environment use_container: True # If true, clone garnet locally and copy into the container use_local_garnet: True # To try out a new docker image e.g. 'stanfordaha/garnet:cst' # - set 'save_verilog_to_tmpdir' to "True", then build (latest) rtl # - set 'which_image' to "cst", then build (cst) rtl # - should see before-and-after designs in /tmp directory: # # % ls -lt /tmp/design.v.* # 1745336 Feb 5 10:47 design.v.cst.deleteme13246 # 1785464 Feb 5 10:39 design.v.latest.deleteme9962 # # which_image: cst # If not set, defaults to 'latest' # save_verilog_to_tmpdir: True # If true, copies final verilog to /tmp postconditions: - assert File( 'outputs/design.v' ) # must exist
<gh_stars>100-1000 package: name: pmp authors: - "<NAME> <<EMAIL>>" export_include_dirs: - include sources: # packages - include/riscv.sv # sources - src/pmp_entry.sv - src/pmp.sv - target: simulation files: - tb/pmp_tb.sv
# Copyright 2020 ETH Zurich and University of Bologna. # Solderpad Hardware License, Version 0.51, see LICENSE for details. # SPDX-License-Identifier: SHL-0.51 package: name: snitch authors: - <NAME> <<EMAIL>> - <NAME> <<EMAIL>> dependencies: axi: {path: ../../vendor/pulp_platform_axi} common_cells: {path: ../../vendor/pulp_platform_common_cells} fpnew: {path: ../../vendor/pulp_platform_fpnew} reqrsp_interface: {path: ../reqrsp_interface} riscv-dbg: {path: ../../vendor/pulp_platform_riscv_dbg} export_include_dirs: - include sources: # Level 0: - src/snitch_pma_pkg.sv - src/riscv_instr.sv # Level 1: - src/snitch_pkg.sv # Level 2: - src/snitch_regfile_ff.sv # - src/snitch_regfile_fpga.sv # - src/snitch_regfile_latch.sv - src/snitch_lsu.sv - src/snitch_l0_tlb.sv # Level 1: - target: not(disable_pmcs) defines: SNITCH_ENABLE_PERF: files: - src/snitch.sv # Disable the performance monitoring counters to save area. - target: disable_pmcs files: - src/snitch.sv - target: test files: - test/snitch_l0_tlb_tb.sv
# This is a basic workflow to help you get started with Actions name: riscv-arch-test # Controls when the workflow will run on: # Triggers the workflow on push or pull request events but only for the master branch push: branches: [ master ] pull_request: branches: [ master ] # Allows you to run this workflow manually from the Actions tab workflow_dispatch: # A workflow run is made up of one or more jobs that can run sequentially or in parallel jobs: # This workflow contains a single job called "build" build: # The type of runner that the job will run on runs-on: ubuntu-latest strategy: fail-fast: false matrix: xlen: # - 32 - 64 device: - I - C - M - privilege - Zifencei name: "RISC-V Compatibility Check" # Steps represent a sequence of tasks that will be executed as part of the job steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - name: "repository checkout" uses: actions/checkout@v2 with: submodules: recursive - name: "Setup RISC-V GCC" run: | mkdir riscv wget https://github.com/riscv-collab/riscv-gnu-toolchain/releases/download/2021.09.16/riscv64-elf-ubuntu-20.04-nightly-2021.09.16-nightly.tar.gz tar -xzf riscv64-elf-ubuntu-20.04-nightly-2021.09.16-nightly.tar.gz ls -al riscv # - name: "Setup Verilator" # run: | # sudo apt install libsystemc # wget https://github.com/sifive/verilator/releases/download/4.204-0sifive3/verilator_4.204-0sifive3_amd64.deb # sudo dpkg -i verilator_4.204-0sifive3_amd64.deb - name: 'Run RISC-V Architecture Tests' uses: verilator/verilator:stable env: RISCV_TARGET: r5p XLEN: ${{ matrix.xlen }} RISCV_DEVICE: ${{ matrix.device }} run: | export PATH=`pwd`/riscv/bin/:$PATH cd test TARGETDIR=`pwd` WORK=`pwd`/work make -C ../submodules/riscv-arch-test verify
<filename>Project-Einstein/.github/workflows/main.yml name: mkdocs on: push: branches: - docs # Environment env: CI: true PYTHON_VERSION: 3.x # Jobs to run jobs: # Build and deploy documentation site deploy: runs-on: ubuntu-latest steps: # Checkout source form GitHub - uses: actions/checkout@v2 # Install Python runtime and dependencies - uses: actions/setup-python@v1 with: python-version: ${{ env.PYTHON_VERSION }} # pip - run: | pip install -r requirements-mkdocs.txt pip install mkdocs-minify-plugin>=0.2 # Set configuration for repository and deploy documentation - env: GH_TOKEN: ${{ secrets.mkdocs }} GH_NAME: ci GH_EMAIL: <EMAIL> run: | REMOTE="https://${GH_TOKEN}@github.com/${GITHUB_REPOSITORY}" git config --global user.name "${GH_NAME}" git config --global user.email "${GH_EMAIL}" git remote set-url origin ${REMOTE} # deploy - run: | mkdocs gh-deploy --force mkdocs --version
name: pytest-icarus on: [push] jobs: build: runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v2 - name: Set up Python 3.x uses: actions/setup-python@v2 with: python-version: '3.x' - name: Install dependencies run: | python -m pip install --upgrade pip pip install pytest pytest-xdist sudo apt-get install iverilog - name: Test code working-directory: ./sim run: | pytest -n auto
<reponame>shady831213/jarvism<filename>.travis.yml language: go go: - "1.11.x" - "1.12.x" install: go get -u github.com/shady831213/jarvism script: #test - go test ./... #install - go install #go fmt - gofmt -s -l .
<gh_stars>0 # Refs # customizing the boot # - https://docs.travis-ci.com/user/customizing-the-build/#Build-Matrix # Sudo enabled faz com que o travis inicialize # uma máquina ubuntu full, caso contrário # ele inicializa um docker. # com a contrapartida de levar mais tempo no boot # ref : https://docs.travis-ci.com/user/reference/overview/ sudo: enabled dist: trusty language: java jdk: - oraclejdk8 # linguagens de desenvolvimento utilizada no build language: python python: - '3.5' # command to install dependencies install: - pip install -r requirements.txt before_script: # dependencias modelsim - sudo dpkg --add-architecture i386 - sudo apt-get update - sudo apt-get install build-essential - sudo apt-get install gcc-multilib g++-multilib lib32z1 lib32stdc++6 lib32gcc1 expat:i386 fontconfig:i386 libfreetype6:i386 libexpat1:i386 libc6:i386 libgtk-3-0:i386 libcanberra0:i386 libpng12-0:i386 libice6:i386 libsm6:i386 libncurses5:i386 zlib1g:i386 libx11-6:i386 libxau6:i386 libxdmcp6:i386 libxext6:i386 libxft2:i386 libxrender1:i386 libxt6:i386 libxtst6:i386 # modelSim # download - wget http://download.altera.com/akdlm/software/acdsinst/17.1std/590/ib_installers/ModelSimSetup-17.1.0.590-linux.run -O modelsim.run # instal modelsim - chmod +x modelsim.run - ./modelsim.run --mode unattended --accept_eula 1 --installdir $HOME # corrigi bug modelsim com kernel 4 - sed -i '209 a\ 4.[0-9]*) vco="linux" ;;' $HOME/modelsim_ase/vco # define o modelsim como simulador para o VUNIT - export VUNIT_SIMULATOR=modelsim - export VUNIT_MODELSIM_PATH=$HOME/modelsim_ase/bin/ script: - python Projetos/0-Infra/testeVHDL.py - python Projetos/C-LogicaCombinacional/script/testeLogicaCombinacional.py - python Projetos/D-UnidadeLogicaAritmetica/script/testeULA.py - python Projetos/E-LogicaSequencial/script/testeLogicaSequencial.py - python Projetos/F-Assembly/scripts/testeAssembly.py - python Projetos/G-Computador/script/testeAssemblyMyCPU.py - python Projetos/G-Computador/script/testeControlUnit.py
<reponame>slaclab/atlas-rd53-fmc-dev GitBase: .. Targets: AtlasCdr53bSpiFeb10GbE: ImageDir: targets/AtlasCdr53bSpiFeb10GbE/images Extensions: - bit - mcs AtlasRd53Feb10GbE: ImageDir: targets/AtlasRd53Feb10GbE/images Extensions: - bit - mcs AtlasRd53FebPgp3: ImageDir: targets/AtlasRd53FebPgp3/images Extensions: - bit - mcs AtlasRd53FebPgp4: ImageDir: targets/AtlasRd53FebPgp4/images Extensions: - bit - mcs AtlasRd53FmcXilinxKc705_RJ45_1GbE: ImageDir: targets/AtlasRd53FmcXilinxKc705_RJ45_1GbE/images Extensions: - bit - mcs AtlasRd53FmcXilinxKc705_SFP_PGPv4: ImageDir: targets/AtlasRd53FmcXilinxKc705_SFP_PGPv4/images Extensions: - bit - mcs AtlasRd53FmcXilinxKc705Pcie: ImageDir: targets/AtlasRd53FmcXilinxKc705Pcie/images Extensions: - bit - mcs AtlasRd53FmcXilinxKcu105_RJ45_1GbE: ImageDir: targets/AtlasRd53FmcXilinxKcu105_RJ45_1GbE/images Extensions: - bit - mcs AtlasRd53FmcXilinxKcu105_SFP_10GbE: ImageDir: targets/AtlasRd53FmcXilinxKcu105_SFP_10GbE/images Extensions: - bit - mcs AtlasRd53FmcXilinxKcu105_SFP_PGPv4: ImageDir: targets/AtlasRd53FmcXilinxKcu105_SFP_PGPv4/images Extensions: - bit - mcs AtlasRd53FmcXilinxKcu105Pcie: ImageDir: targets/AtlasRd53FmcXilinxKcu105Pcie/images Extensions: - bit - mcs AtlasRd53FmcXilinxZcu102: ImageDir: targets/AtlasRd53FmcXilinxZcu102/images Extensions: - bin AtlasRd53FmcXilinxZcu102_WithoutPS_SFP_1GbE: ImageDir: targets/AtlasRd53FmcXilinxZcu102_WithoutPS_SFP_1GbE/images Extensions: - bin AtlasRd53FmcXilinxZcu102_WithoutPS_SFP_10GbE: ImageDir: targets/AtlasRd53FmcXilinxZcu102_WithoutPS_SFP_10GbE/images Extensions: - bin AtlasRd53FmcXilinxZcu102DmaLoopback: ImageDir: targets/AtlasRd53FmcXilinxZcu102DmaLoopback/images Extensions: - bin Releases: all: Primary: True Targets: - AtlasCdr53bSpiFeb10GbE - AtlasRd53Feb10GbE - AtlasRd53FebPgp3 - AtlasRd53FebPgp4 - AtlasRd53FmcXilinxKc705_RJ45_1GbE - AtlasRd53FmcXilinxKc705_SFP_PGPv4 - AtlasRd53FmcXilinxKc705Pcie - AtlasRd53FmcXilinxKcu105_RJ45_1GbE - AtlasRd53FmcXilinxKcu105_SFP_10GbE - 
AtlasRd53FmcXilinxKcu105_SFP_PGPv4 - AtlasRd53FmcXilinxKcu105Pcie - AtlasRd53FmcXilinxZcu102 - AtlasRd53FmcXilinxZcu102_WithoutPS_SFP_1GbE - AtlasRd53FmcXilinxZcu102_WithoutPS_SFP_10GbE - AtlasRd53FmcXilinxZcu102DmaLoopback Types: - Firmware-Only AtlasCdr53bSpiFeb10GbE: Targets: - AtlasCdr53bSpiFeb10GbE Types: - Firmware-Only
<reponame>mayankmanj/acl2<gh_stars>100-1000 language: lisp env: matrix: # - LISP=abcl # - LISP=allegro - LISP=sbcl - LISP=sbcl32 - LISP=ccl - LISP=ccl32 - LISP=clisp - LISP=clisp32 # - LISP=cmucl - LISP=ecl matrix: allow_failures: - env: LISP=ecl install: - curl -L https://github.com/luismbo/cl-travis/raw/master/install.sh | sh - if [ "${LISP:(-2)}" = "32" ]; then sudo apt-get install libc6-dev-i386; fi script: - cl -e '(ql:quickload :cffi-grovel) (ql:quickload :trivial-features-tests) (unless (trivial-features-tests:run) (uiop:quit 1))'
<reponame>Zacarhay/RFSoC2x2-PYNQ docs_list_title: downloads docs: - title: PYNQ image url: http://www.pynq.io/board.html - title: Board Files url: ./board_files.html - title: XDC files url: https://github.com/Xilinx/RFSoC2x2-PYNQ/blob/master/board/RFSoC2x2/base/vivado/constraints/base.xdc
image: file: .gitpod.Dockerfile tasks: - before: > pip3 install -e . && export COCOTB_REDUCED_LOG_FMT=1 init: > gp preview https://docs.cocotb.org/ && cd /workspace/cocotb/examples/simple_dff && gp open dff.sv && gp open dff.vhdl && gp open Makefile && gp open test_dff.py && make SIM=icarus && EXTRA_ARGS="--trace-fst --trace-structs" make SIM=verilator && gtkwave dump.fst --rcvar 'initial_window_x 1920' --rcvar 'initial_window_y 1061' --rcvar 'do_initial_zoom_fit yes' & command: > cd /workspace/cocotb/examples/simple_dff ; history -s make SIM=cvc ; history -s make SIM=ghdl TOPLEVEL_LANG=vhdl ; history -s make SIM=icarus ; history -s EXTRA_ARGS=\"--trace-fst --trace-structs\" make SIM=verilator ; history -s gtkwave dump.fst --rcvar \'initial_window_x 1920\' --rcvar \'initial_window_y 1061\' --rcvar \'do_initial_zoom_fit yes\' ; history -d 3 # NOTE: the geometry for gtkwave's fullscreen size can be found with xwininfo -root # https://www.gitpod.io/docs/config-ports/ ports: - port: 6080 # VNC for e.g. gtkwave onOpen: notify - port: 5900 onOpen: ignore vscode: extensions: - [email protected]:RPslnvyzniF7C66mxHT+Hg== - [email protected]:fJXQenGkzoZwUN/RddpuSw==
<reponame>mtdsousa/antlr4-verilog # Use the latest 2.1 version of CircleCI pipeline process engine. # See: https://circleci.com/docs/2.0/configuration-reference version: 2.1 # Define a job to be invoked later in a workflow. # See: https://circleci.com/docs/2.0/configuration-reference/#jobs jobs: build-test: # Specify the execution environment. You can specify an image from Dockerhub or use one of our Convenience Images from CircleCI's Developer Hub. # See: https://circleci.com/docs/2.0/configuration-reference/#docker-machine-macos-windows-executor docker: - image: cimg/python:3.6.15 # Add steps to the job # See: https://circleci.com/docs/2.0/configuration-reference/#steps steps: - checkout - run: name: "Build Python package" command: | python3 -m venv venv . venv/bin/activate python3 -m pip install --upgrade build python3 -m build - run: name: "Run Python unit tests" command: | python3 -m venv venv . venv/bin/activate python3 -m pip install `ls -t dist/*.whl | head -1` python3 -m unittest test/test.py # Invoke jobs via workflows # See: https://circleci.com/docs/2.0/configuration-reference/#workflows workflows: build-test-workflow: jobs: - build-test
<filename>conf/experiments/fpt2021/fig1a_vivado_7010.yaml # @package _global_ do_blink: backend: vivado figure: fig1 sub_figure: a device: 7010 part: xc7z010clg400-1 num_luts: 2400 bft: bft8
#----------------------------------------------------------------------------- # This file is part of the 'SPACE SMURF RFSOC'. It is subject to # the license terms in the LICENSE.txt file found in the top-level directory # of this distribution and at: # https://confluence.slac.stanford.edu/display/ppareg/LICENSE.html. # No part of the 'SPACE SMURF RFSOC', including this file, may be # copied, modified, propagated, or distributed except according to the terms # contained in the LICENSE.txt file. #----------------------------------------------------------------------------- GitBase: .. TopRoguePackage: simple_zcu208_example RoguePackages: - submodules/surf/python - python # RogueConfig: # - ../software/config # RogueScripts: # - ../software/scripts/devGui Targets: SimpleZcu208Example: ImageDir: targets/SimpleZcu208Example/images Extensions: - ltx - xsa - bit Releases: all: Primary: True Targets: - SimpleZcu208Example Types: - Rogue
<reponame>chebykinn/university<filename>metro/abstract/report/style.yaml --- documentclass: extreport fontsize: 14pt mainfont: "Times New Roman" sansfont: "Liberation Sans" monofont: "DejaVu Sans Mono" subparagraph: yes geometry: - left=2.5cm - right=1.0cm - top=2.0cm - bottom=2.0cm toc: yes link-citations: yes ---
<reponame>hito0512/Vitis-AI<filename>models/AI-Model-Zoo/model-list/tf_mobilenetv2_cityscapes_1024_2048_132.74G_2.0/model.yaml # Copyright 2019 Xilinx Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. description: Deeplabv3+(Mobilenetv2) segmentation on Cityscapes. input size: 1024*2048 float ops: 132.74G task: segmentation framework: tensorflow prune: 'no' version: 2.0 files: - name: tf_mobilenetv2_cityscapes_1024_2048_132.74G_2.0 type: float & quantized board: GPU download link: https://www.xilinx.com/bin/public/openDownload?filename=tf_mobilenetv2_cityscapes_1024_2048_132.74G_2.0.zip checksum: 4f0c42d8e9faf02896681fa5f875a968 - name: mobilenet_v2_cityscapes_tf type: xmodel board: zcu102 & zcu104 & kv260 download link: https://www.xilinx.com/bin/public/openDownload?filename=mobilenet_v2_cityscapes_tf-zcu102_zcu104_kv260-r2.0.0.tar.gz checksum: 37d1b9c53858bfad526de9b64e06edae - name: mobilenet_v2_cityscapes_tf type: xmodel board: vck190 download link: https://www.xilinx.com/bin/public/openDownload?filename=mobilenet_v2_cityscapes_tf-vck190-r2.0.0.tar.gz checksum: c3623e080839c5aff4575b90b4029ad5 license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
blank_issues_enabled: false contact_links: - name: Bug url: https://github.com/openhwgroup/cv32e41p/issues/new?template=bug.md about: For bugs in the RTL, Documentation, Verification environment or Tool and Build system. labels: "Type:Bug" - name: Task url: https://github.com/openhwgroup/cv32e41p/issues/new?template=task.md about: For any task except bug fixes. labels: "Type:Task" - name: Question url: https://github.com/openhwgroup/cv32e41p/issues/new?template=question.md about: For general questions. labels: "Type:Question" - name: Enhancement url: https://github.com/openhwgroup/cv32e41p/issues/new?template=enhancement.md about: For feature requests and enhancements. labels: "Type:Enhancement"
package: name: common_cells authors: - "<NAME> <<EMAIL>>" - "<NAME> <<EMAIL>>" - "<NAME> <<EMAIL>>" - "<NAME> <<EMAIL>>" dependencies: tech_cells_generic: { git: "https://github.com/pulp-platform/tech_cells_generic.git", version: 0.1.1 } sources: - src/fifo_v1.sv - src/fifo_v2.sv - src/fifo_v3.sv - src/lfsr_8bit.sv - src/spill_register.sv - src/stream_register.sv - src/stream_mux.sv - src/stream_demux.sv - src/cdc_2phase.sv - src/cdc_fifo_2phase.sv - src/cdc_fifo_gray.sv - src/onehot_to_bin.sv - src/rstgen.sv - src/rstgen_bypass.sv - src/edge_propagator_tx.sv - src/edge_propagator_rx.sv - src/edge_propagator.sv - src/lzc.sv - src/rrarbiter.sv - src/sync_wedge.sv - src/sync.sv - src/graycode.sv - src/clk_div.sv - src/edge_detect.sv - src/serial_deglitch.sv - src/counter.sv - src/mv_filter.sv - target: simulation files: - src/sram.sv - target: test files: - test/fifo_tb.sv - test/stream_register_tb.sv - test/cdc_2phase_tb.sv - test/cdc_fifo_tb.sv - test/graycode_tb.sv # deprecated modules - src/deprecated/find_first_one.sv - src/deprecated/generic_fifo.sv - src/deprecated/generic_LFSR_8bit.sv - src/deprecated/pulp_sync_wedge.sv - src/deprecated/pulp_sync.sv - src/deprecated/clock_divider.sv - src/deprecated/clock_divider_counter.sv
# Device description for the Keithley 2634b Sourcemeter.
# set_ commands expect a parameter; get_ commands return a value.
# Just the very basic commands are implemented here.
identifier : Keithley Instruments Inc., Model 2634B

# Channel 1 maps to the instrument's "smua" SCPI/TSP object.
channel 1:
    on : smua.source.output = 1
    off : smua.source.output = 0
    get_current : print(smua.measure.i())
    # set_voltage is completed by appending the desired level at runtime.
    set_voltage : smua.source.levelv =
    get_voltage : print(smua.measure.v())
    set_mode_measure_current : display.smua.measure.func = display.MEASURE_DCAMPS

# Channel 2 maps to "smub".
channel 2:
    on : smub.source.output = 1
    off : smub.source.output = 0
    get_current : print(smub.measure.i())
    set_voltage : smub.source.levelv =
    get_voltage : print(smub.measure.v())
    set_mode_measure_current : display.smub.measure.func = display.MEASURE_DCAMPS

# NOTE(review): the six top-level keys below exactly duplicate the
# "channel 1" commands. This looks like copy-paste residue, but it may be an
# intentional "default channel" mapping for consumers that don't specify a
# channel — confirm with the consuming driver before removing.
on : smua.source.output = 1
off : smua.source.output = 0
get_current : print(smua.measure.i())
set_voltage : smua.source.levelv =
get_voltage : print(smua.measure.v())
set_mode_measure_current : display.smua.measure.func = display.MEASURE_DCAMPS
<reponame>GaloisInc/hacrypto
language: c

branches:
  except:
    - gh-pages

compiler:
  - gcc
  - clang

# NOTE(review): the original file declared "env:" twice (once empty, once with
# "global:"), which is a duplicate mapping key — invalid YAML 1.2, and most
# parsers silently keep only the last occurrence. The two stanzas are merged
# into a single "env:" mapping here.
env:
  # Default build. Release.
  # Debug build
  global:
    # The next declaration is the encrypted COVERITY_SCAN_TOKEN, created
    # via the "travis encrypt" command using the project repo's public key
    - secure: "<KEY>"

matrix:
  allow_failures:

before_install:
  - sudo apt-get update -qq
  - sudo apt-get install debhelper libpcap-dev libssl-dev libnet1-dev libnss3-dev libnspr4-dev libgmp-dev libkrb5-dev yasm
  - export OMP_NUM_THREADS=4

script:
  - cd src && ./configure && make -s clean && make -sj4 check

addons:
  coverity_scan:
    project:
      name: "magnumripper/JohnTheRipper"
      description: "http://openwall.com/john"
    notification_email: <EMAIL>
    build_command_prepend:
    build_command: "cd src && ./configure && make -s clean && make -sj4 check"
    branch_pattern: coverity_scan
<reponame>pan185/UnarySim # This file defines single architecture set for tlut systolic array performance projection - proj_16_16_bank16_block8 - proj_32_32_bank16_block8 - proj_64_64_bank16_block8 - proj_128_128_bank16_block8
name: spi_top clock_port: wb_clk_i verilog: - spi_clgen.v - spi_defines.v - spi_shift.v - spi_top.v - timescale.v
PROJECT: dt: 0.1e-6 cpu_debug_mode: 1 cpu_debug_hierarchies: [[0, 'top.tb_i.filter_i']] CPU_TARGET: sim: tstop: 10e-6 simctrl_path: './my_custom_simctrl.yaml' FPGA_TARGET: fpga: fpga_sim_ctrl: 'VIVADO_VIO' tstop: 10e-6 simctrl_path: './my_custom_simctrl.yaml'
name: CI on: [push] jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 - name: Setup python version uses: actions/setup-python@v2 with: python-version: '3.x' - name: Install verilator run: sudo apt-get install verilator - name: Install fusesoc run: | pip install fusesoc fusesoc init -y - name: Setup project run: | fusesoc library add Illusion . fusesoc library add --sync-type git Verilator-Test https://github.com/Illusion-Graphics/Verilator-Test.git - name: Simulate Top run: | fusesoc run --target=test Illusion:RISC16:CPU:1.0
<reponame>f110/wing --- apiVersion: rbac.authorization.k8s.io/v1 kind: ClusterRole metadata: creationTimestamp: null name: minio-extra-operator rules: - apiGroups: - '*' resources: - pods - secrets - services verbs: - get - list - watch - apiGroups: - '*' resources: - pods/portforward verbs: - create - get - list - apiGroups: - minio.f110.dev resources: - miniobuckets verbs: - create - delete - get - list - patch - update - watch - apiGroups: - minio.f110.dev resources: - miniobuckets/status verbs: - get - patch - update - apiGroups: - minio.f110.dev resources: - miniousers verbs: - create - delete - get - list - patch - update - watch - apiGroups: - minio.f110.dev resources: - miniousers/status verbs: - get - patch - update - apiGroups: - miniocontroller.min.io resources: - minioinstances verbs: - get - list - watch
<reponame>bver/GERET --- algorithm: class: SteadyState population_size: 800 probabilities: crossover: 0.5 mutation: 0.01 injection: 0.9 termination: max_steps: 2000 on_individual: :stopping_condition init: method: ramped # grow or full or random or ramped random_length: 8 sensible_depth: 5 inject: # alternative to a crossover or copy method: grow # grow or full or random random_length: 8 sensible_depth: 5 grammar: class: Abnf::File filename: sample/santa_fe_ant_trail/grammar.abnf mapper: class: BreadthFirst track_support_on: true selection: class: Tournament # proportional_by: fitness tournament_size: 2 selection_rank: #optional class: Ranking replacement: class: Tournament # Truncation tournament_size: 10 replacement_rank: class: Ranking order_by: :fitness direction: :minimize crossover: class: CrossoverLHS mutation: class: MutationNodal store: class: Store filename: ./ant_steady_state_lhsc.store report: class: AntReport require: sample/santa_fe_ant_trail/ant_report.rb individual: class: AntIndividualSingleObjective require: sample/santa_fe_ant_trail/ant_individual.rb shorten_chromozome: true
<reponame>bver/GERET<gh_stars>1-10 --- algorithm: class: Spea2 population_size: 300 max_archive_size: 200 duplicate_elimination: false shorten_archive_individual: true probabilities: crossover: 0.5 mutation: 0.01 injection: 0.9 init: method: ramped # grow or full or ramped sensible_depth: 7 inject: method: grow # grow or full or random sensible_depth: 7 termination: max_steps: 1000 on_individual: stopping_condition grammar: class: Abnf::File filename: sample/santa_fe_ant_trail/grammar.abnf mapper: class: DepthLocus selection: class: Tournament tournament_size: 2 selection_rank: # do not change class: Spea2Ranking crossover: class: CrossoverRipple margin: 2 #1 step: 2 mutation: class: MutationRipple store: class: Store filename: ./ant_spea2.store report: class: AntReport require: sample/santa_fe_ant_trail/ant_report.rb individual: class: AntIndividualMOWeak require: sample/santa_fe_ant_trail/ant_individual.rb shorten_chromozome: false
version: 2 submodules: include: [] build: image: latest python: version: 3.8 install: - requirements: doc/requirements.txt sphinx: builder: html configuration: doc/conf.py formats: [] # - htmlzip # - pdf # - epub
fpga-support: files: [ rtl/AxiBramLogger.sv, rtl/AxiToAxiLitePc.sv, rtl/BramDwc.sv, rtl/BramLogger.sv, rtl/BramPort.sv, rtl/SyncDpRam.sv, rtl/SyncSpRam.sv, rtl/SyncSpRamBeNx32.sv, rtl/SyncSpRamBeNx64.sv, rtl/SyncTpRam.sv, rtl/TdpBramArray.sv, ]
<gh_stars>1-10 --- project: description: "A CAN peripheral for the wishbone bus" foundry: "SkyWater" git_url: "https://github.com/zaellis/caravel_user_project.git" organization: "<NAME>" organization_url: "https://zacharyell.is/" owner: "<NAME>" process: "SKY130" project_name: "Wishbone CAN" project_id: "00000000" tags: - "Open MPW" - "MPW-TWPTWO" category: "Test Harness" top_level_netlist: "caravel/verilog/gl/caravel.v" user_level_netlist: "verilog/gl/user_project_wrapper.v" version: "1.00" cover_image: "docs/source/_static/caravel_harness.png"
<reponame>recogni/cluster_interconnect<filename>src_files.yml low_latency_interco: incdirs: - rtl/low_latency_interco files: - rtl/low_latency_interco/FanInPrimitive_Req.sv - rtl/low_latency_interco/ArbitrationTree.sv - rtl/low_latency_interco/MUX2_REQ.sv - rtl/low_latency_interco/AddressDecoder_Resp.sv - rtl/low_latency_interco/TestAndSet.sv - rtl/low_latency_interco/RequestBlock2CH.sv - rtl/low_latency_interco/RequestBlock1CH.sv - rtl/low_latency_interco/FanInPrimitive_Resp.sv - rtl/low_latency_interco/ResponseTree.sv - rtl/low_latency_interco/ResponseBlock.sv - rtl/low_latency_interco/AddressDecoder_Req.sv - rtl/low_latency_interco/XBAR_TCDM.sv - rtl/low_latency_interco/XBAR_TCDM_WRAPPER.sv - rtl/low_latency_interco/TCDM_PIPE_REQ.sv - rtl/low_latency_interco/TCDM_PIPE_RESP.sv - rtl/low_latency_interco/grant_mask.sv - rtl/low_latency_interco/priority_Flag_Req.sv peripheral_interco: incdirs: [ rtl/peripheral_interco, ../../rtl/includes, ] files: [ rtl/peripheral_interco/AddressDecoder_PE_Req.sv, rtl/peripheral_interco/AddressDecoder_Resp_PE.sv, rtl/peripheral_interco/ArbitrationTree_PE.sv, rtl/peripheral_interco/FanInPrimitive_Req_PE.sv, rtl/peripheral_interco/RR_Flag_Req_PE.sv, rtl/peripheral_interco/MUX2_REQ_PE.sv, rtl/peripheral_interco/FanInPrimitive_PE_Resp.sv, rtl/peripheral_interco/RequestBlock1CH_PE.sv, rtl/peripheral_interco/RequestBlock2CH_PE.sv, rtl/peripheral_interco/ResponseBlock_PE.sv, rtl/peripheral_interco/ResponseTree_PE.sv, rtl/peripheral_interco/XBAR_PE.sv, ] tcdm_interconnect: incdirs: [ ] files: [ rtl/tcdm_interconnect/tcdm_interconnect_pkg.sv, rtl/tcdm_interconnect/addr_dec_resp_mux.sv, rtl/tcdm_interconnect/amo_shim.sv, rtl/tcdm_interconnect/xbar.sv, rtl/tcdm_interconnect/clos_net.sv, rtl/tcdm_interconnect/bfly_net.sv, rtl/tcdm_interconnect/tcdm_interconnect.sv, ]
version: "{build}" image: Visual Studio 2017 test: off deploy: off environment: global: # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the # /E:ON and /V:ON options are not enabled in the batch script intepreter # See: http://stackoverflow.com/a/13751649/163740 CMD_IN_ENV: "cmd /E:ON /V:ON /C .\\appveyor\\run_with_env.cmd" PATH: C:\Ruby23\bin;C:\Ruby23-x64\DevKit\mingw\bin;C:\Python27;C:\Python27\Scripts;%PATH% init: - cmd: rd /s /q %CHOCOLATEYINSTALL% - ps: iex ((new-object net.webclient).DownloadString('https://chocolatey.org/install.ps1')) install: - cinst packages.config -y - refreshenv - apm install # https://packaging.python.org/guides/supporting-windows-using-appveyor/ - pip install -r requirements.txt # Gemfile Install - set PATH=C:\Ruby24-x64\bin;%PATH% - ruby -v - gem update --system - bundle install # PHP - ps: Set-Service wuauserv -StartupType Manual # PHP-CS-Fixer - composer install # elm-format - npm install -g elm-format@exp build_script: # Install languages to Atom - apm install --packages-file atom-packages.txt # Run tests on package #- "%LOCALAPPDATA%\\atom\\bin\\atom.cmd --test spec" - apm test --path %LOCALAPPDATA%/atom/bin/atom.cmd
<gh_stars>1000+ # Human readable task name name: Full Chain # Long form description. description: |+ Do you have what it takes to pwn all the layers? # The flag flag: CTF{next_stop_p2o_fda81a139a70c6d4} # Task category. (one of hw, crypto, pwn, rev, web, net, misc) category: pwn # === the fields below will be filled by SRE or automation === # Task label label: '' # URL for web challenges link: '' # host/port for non-web challenges host: 'fullchain.2021.ctfcompetition.com 1337' # the URL for attachments, to be filled in by automation attachment: '' # is this challenge released? Will be set by SREs visible: false
<reponame>BearerPipelineTest/google-ctf runtime: nodejs12 service: typeselfsub env_variables: DB_USER: ctf DB_PASS: "" DB_NAME: ctf INSTANCE_CONNECTION_NAME: your instance here
name: lint-review

on:
  workflow_run:
    workflows: ["trigger-lint"]
    types:
      - completed

jobs:
  lint_review:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      # Fetch the event.json artifact uploaded by the trigger-lint workflow.
      # NOTE(review): the action ref was mangled to "actions/[email protected]"
      # by a scrubbing pass. The script uses the pre-v5 "github.actions.*"
      # API surface, so github-script v3 (as in the verible-linter-action
      # documentation) is the matching restore.
      - name: 'Download artifact'
        id: get-artifacts
        uses: actions/github-script@v3
        with:
          script: |
            var artifacts = await github.actions.listWorkflowRunArtifacts({
               owner: context.repo.owner,
               repo: context.repo.repo,
               run_id: ${{ github.event.workflow_run.id }},
            });
            var matchArtifact = artifacts.data.artifacts.filter((artifact) => {
              return artifact.name == "event.json"
            })[0];
            var download = await github.actions.downloadArtifact({
               owner: context.repo.owner,
               repo: context.repo.repo,
               artifact_id: matchArtifact.id,
               archive_format: 'zip',
            });
            var fs = require('fs');
            fs.writeFileSync('${{ github.workspace }}/event.json.zip', Buffer.from(download.data));

      - run: |
          unzip event.json.zip

      - name: Run Verible action
        uses: chipsalliance/verible-linter-action@main
        with:
          paths: ./tests
          github_token: ${{ secrets.GITHUB_TOKEN }}
          suggest_fixes: 'false'
python-targets: - 3 # requirements: # - colorama # - py-flags # doc-warnings: yes test-warnings: no strictness: medium max-line-length: 180 pylint: disable: - too-many-arguments # options: # docstring-min-length: 10 pep8: disable: - E704 enable: options: max-line-length: 180 mccabe: options: max-complexity: 20 # vulture: # run: true ignore-paths: - lib - netlist - prj - sim - src # - tb - tcl - temp # - tools - ucf - xst
efpga_subsystem: incdirs: [ ../includes, ., ] files: [ efpga_subsystem.sv, apb_efpga_hwce.sv, dc_accelerator_fsm.sv, efpga_sel_clk_dc_fifo.sv, ]
<gh_stars>0 --- category: - Recursion - String manipulation - Classic CS problems and programs note: Text processing
<filename>.travis.yml<gh_stars>1-10
# http://docs.travis-ci.com/user/workers/container-based-infrastructure/
sudo: false
language: python
python:
  - "3.4"
  - "2.7"
  - "pypy"

addons:
  apt:
    # NOTE(review): Travis expects this key to be "sources" (plural); the
    # original "source" is not a recognized key, so the toolchain PPA was
    # silently never added.
    sources:
      - ubuntu-toolchain-r-test
    packages:
      - build-essential
      - clang
      - bison
      - flex
      - libreadline-dev
      - gawk
      - tcl
      - tcl-dev
      - graphviz
      - xdot
      - pkg-config
      - autoconf
      - gperf
      - libffi-dev
      - libftdi-dev

# update-alternatives:
#   - install /usr/bin/gcc gcc /usr/bin/gcc-4.9 50
#   - install /usr/bin/g++ g++ /usr/bin/g++-4.9 50
# cannot figure out how to get a new version of gcc
# to build the dependencies.
# gcc-4.8 ??? why no work?
# g++-4.8 ??? why no work?

cache:
  directories:
    - $HOME/iverilog
    - $HOME/icestorm
    - $HOME/arachne-pnr
    - $HOME/yosys

before_install:
  - g++ --version
  - export PATH=$PATH:$HOME/iverilog/bin/:$HOME/yosys/
  - export PATH=$PATH:$HOME/icestorm/icepack/:$HOME/arachne-pnr/bin/
  - echo $HOME
  - ls $HOME/
  - chmod +x ./scripts/ci/install_iverilog.sh
  - ./scripts/ci/install_iverilog.sh
  # these fail to build and install because the inability to
  # update to gcc >= 4.8
  # - chmod +x ./scripts/ci/install_icestorm.sh
  # - ./scripts/ci/install_icestorm.sh
  # - chmod +x ./scripts/ci/install_arachne.sh
  # - ./scripts/ci/install_arachne.sh
  - chmod +x ./scripts/ci/install_yosys.sh
  - ./scripts/ci/install_yosys.sh
  - ls $HOME/yosys/
  - yosys -V

# command to install dependencies
install:
  - chmod +x ./scripts/ci/install_myhdl.sh
  - ./scripts/ci/install_myhdl.sh
  - pip install coverage
  - pip install coveralls
  - pip install Pillow
  - pip install .

# run the test
script:
  - cd examples
  - py.test -s
  # There is some issue that causes the tests executed with py.test
  # to take 10x longer, run the tests individually until resolved.
  - cd ../test
  - make test
  # - py.test -s

notifications:
  email: false
dist: bionic language: cpp compiler: - gcc addons: apt: packages: - g++-8 before_install: - wget https://github.com/bazelbuild/bazel/releases/download/1.1.0/bazel_1.1.0-linux-x86_64.deb - sudo dpkg -i bazel_1.1.0-linux-x86_64.deb script: - bazel build --cxxopt='-std=c++17' //... - bazel test --cxxopt='-std=c++17' //...
# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Human readable task name name: memsafety # Long form description. description: |+ Provide a snippet of Rust code to run as an app in our edge compute. We created a sandbox that doesn't allow any unsafe code and limits what can be run, so we can be sure the different apps are well isolated. # The flag flag: CTF{s4ndb0x1n9_s0urc3_1s_h4rd_ev3n_1n_rus7} # Task category. (one of hw, crypto, pwn, rev, web, net, misc) category: pwn # === the fields below will be filled by SRE or automation === # Task label label: '' # URL for web challenges link: '' # host/port for non-web challenges host: 'memsafety.2021.ctfcompetition.com 1337' # the URL for attachments, to be filled in by automation attachment: '' # is this challenge released? Will be set by SREs visible: false
--- input_file : ../akane/07_merge_sort_node_single.akd output_file : 07_merge_sort_node_single.md image_url : "Fig.1 マージソートノードと 4-way マージソートツリー" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/44af6251-19a2-168c-4375-0d6a27e83105.jpeg" "Fig.2 マージソートノードの入出力" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/d8d319bb-4d48-8af6-36d0-9ab65fd2a7b1.jpeg" "Fig.3 マージソートノードの状態遷移(1)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/e81dbf0e-4f8d-1540-793b-0142be6e28d4.jpeg" "Fig.4 マージソートノードの状態遷移(2)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/74a66f88-d7c1-af76-289d-62529ecfcabd.jpeg" "Fig.5 マージソートノードの状態遷移(3)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/e336b2a6-a38e-bbeb-516a-41fb440a3c40.jpeg" link_list : - id : "「はじめに」" title: "「VHDL で書くマージソーター(はじめに)」@Qiita" url : "https://qiita.com/ikwzm/items/6665b2ef44d878a5b85f" - id : "「ワードの定義」" title: "「VHDL で書くマージソーター(ワードの定義)」@Qiita" url : "https://qiita.com/ikwzm/items/bdcd8876317b908ff492" - id : "「ワード比較器」" title: "「VHDL で書くマージソーター(ワード比較器)」@Qiita" url : "https://qiita.com/ikwzm/items/d5d1dd264b1670f33bd7" - id : "「ソーティングネットワーク」" title: "「VHDL で書くマージソーター(ソーティングネットワーク)」@Qiita" url : "https://qiita.com/ikwzm/items/a1d06e47523759c726ae" - id : "「バイトニックマージソート」" title: "「VHDL で書くマージソーター(バイトニックマージソート)」@Qiita" url : "https://qiita.com/ikwzm/items/366eacbf6a877994c955" - id : "「バッチャー奇偶マージソート」" title: "「VHDL で書くマージソーター(バッチャー奇偶マージソート)」@Qiita" url : "https://qiita.com/ikwzm/items/c21a53f21b87408a7805" - id : "「シングルワード マージソート ノード」" title: "「VHDL で書くマージソーター(シングルワード マージソート ノード)」@Qiita" url : "https://qiita.com/ikwzm/items/7fd7ef9ffc4d9b314fee" - id : "「マルチワード マージソート ノード」" title: "「VHDL で書くマージソーター(マルチワード マージソート ノード)」@Qiita" url : "https://qiita.com/ikwzm/items/ed96b7a44b83bcee4ba5" - id : "「マージソート ツリー」" title: "「VHDL で書くマージソーター(マージソート ツリー)」@Qiita" url : "https://qiita.com/ikwzm/items/1f76ae5cda95aaf92501" - id : "「端数ワード処理」" title: 
"「VHDL で書くマージソーター(端数ワード処理)」@Qiita" url : "https://qiita.com/ikwzm/items/6b15340f1e05ef03f8d0" - id : "「ストリーム入力」" title: "「VHDL で書くマージソーター(ストリーム入力)」@Qiita" url : "https://qiita.com/ikwzm/items/56e22511021a082a2ccd" - id : "「ストリームフィードバック」" title: "「VHDL で書くマージソーター(ストリームフィードバック)」@Qiita" url : "https://qiita.com/ikwzm/items/e8c59c0ec92956c9355f" - id : "「ArgSort IP」" title: "「VHDL で書くマージソーター(ArgSort IP)」@Qiita" url : "https://qiita.com/ikwzm/items/89fc9542492fca74c9e3" - id : "「ArgSort-Ultra96」" title: "「VHDL で書くマージソーター(ArgSort-Ultra96)」@Qiita" url : "https://qiita.com/ikwzm/items/d58c9b77d038e23ac792" - id : "「ArgSort-Kv260」" title: "「VHDL で書くマージソーター(ArgSort-Kv260)」@Qiita" url : "https://qiita.com/ikwzm/items/ec0f779534c44b35334a" - id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(1)」" title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(1)」" url : "https://www.acri.c.titech.ac.jp/wordpress/archives/132" - id : "「VALID-then-READY」" title: "「VALID 信号と READY 信号によるハンドシェイクの注意点」" url : "https://qiita.com/ikwzm/items/9736b5547cb15309af5c" seg_level : -1 ---
<gh_stars>1-10 package: name: jtag_pulp authors: - "<NAME> <<EMAIL>>" # current maintainer - "<NAME> <>" - "<NAME> <<EMAIL>>" sources: - src/bscell.sv - src/jtag_axi_wrap.sv - src/jtag_enable.sv - src/jtag_enable_synch.sv - src/jtagreg.sv - src/jtag_rst_synch.sv - src/jtag_sync.sv - src/tap_top.v - target: test include_dirs: - testbench files: # - testbench/top_tb_jtag.v obsolete and broken - testbench/tb_jtag.sv
# Platform configuration for the Nangate45 / FreePDK45 open cell library.

# Process node
PROCESS: 45

LIBRARY_NAME: Nangate45
LIBERTY: nangate45.lib
LEF: nangate45.lef

# FIXME: Used in MCHL-T
TECHLEF: nangate45.tech.lef
CELLLEF: nangate45.cell.lef

# Set the TIEHI/TIELO cells
# These are used in yosys synthesis to avoid logical 1/0's in the netlist
TIEHI_CELL_AND_PORT: ["LOGIC1_X1", "Z"]
TIELO_CELL_AND_PORT: ["LOGIC0_X1", "Z"]

# Placement site for core cells
# This can be found in the technology lef
PLACE_SITE: FreePDK45_38x28_10R_NP_162NW_34O

# Track information for generating DEF tracks
TRACKS_INFO_FILE: tracks.info

# Endcap and Welltie cells
# Use fillers if kit doesn't have them
ENDCAP_CELL: FILLCELL_X1
WELLTIE_CELL: FILLCELL_X1
# NOTE(review): "WELLTTAP_RULE" looks like a typo for "WELLTAP_RULE" — but the
# key name is read by external flow scripts, so confirm which spelling the
# consumer expects before renaming.
WELLTTAP_RULE: 120

# TritonCTS options
CTS_DUMMY_CELL: BUF_X1
CTS_BUF_CELL: BUF_X4
CTS_TECH_DIR: tritonCTS

# RC information for the placer
CAP_PER_MICRON: 0.235146e-12
RES_PER_MICRON: 1.59

# RC information for the sizer
CAP_PER_METER: 0.36e-9
RES_PER_METER: 0.1233e+6
RESIZER_BUF_CELL: BUF_X4

# Magic technology file
MAGIC_TECH_FILE: magic.tech

# Dont use cells to ease congestion
# Specify at least one filler cell if none
DONT_USE_CELLS: FILLCELL_X1

# FIXME: Used in ABC logic synthesis
DEFAULT_FLOP: DFF_X1
DEFAULT_FLOP_DPIN: D
DEFAULT_FLOP_QPIN: Q
DEFAULT_FLOP_CKPIN: CK

# FIXME: Used in TritonFP
VERILOG2DEF_DBU: 2000
IO_PLACER_VMETAL: 5
IO_PLACER_HMETAL: 6
CORE_SPACE: 14
# Copyright 2019 Xilinx Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. description: inception_resnetv2 classifier on ImageNet. input size: 299*299 float ops: 26.35G task: classification framework: tensorflow prune: 'no' version: 2.0 files: - name: tf_inceptionresnetv2_imagenet_299_299_26.35G_2.0 type: float & quantized board: GPU download link: https://www.xilinx.com/bin/public/openDownload?filename=tf_inceptionresnetv2_imagenet_299_299_26.35G_2.0.zip checksum: 7e0a8d19a431fe260cacbf7a8153dc9b - name: inception_resnet_v2_tf type: xmodel board: zcu102 & zcu104 & kv260 download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_resnet_v2_tf-zcu102_zcu104_kv260-r2.0.0.tar.gz checksum: b8d98005f39e80f82b36def86dfac04f - name: inception_resnet_v2_tf type: xmodel board: vck190 download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_resnet_v2_tf-vck190-r2.0.0.tar.gz checksum: c73225b1fee416221bd85a0d582363ba - name: inception_resnet_v2_tf type: xmodel board: vck50006pe-DPUCVDX8H-DWC download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_resnet_v2_tf-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz checksum: bc53d4abb92fcfe7fe1f807cecd77869 - name: inception_resnet_v2_tf type: xmodel board: u50lv-DPUCAHX8H download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_resnet_v2_tf-u50lv-DPUCAHX8H-r2.0.0.tar.gz checksum: 0f874efa1114472fdee594aa962cc953 - name: inception_resnet_v2_tf type: xmodel board: 
u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_resnet_v2_tf-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz checksum: 9cc89b89e23a7b48c00e62231c5e41f2 license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
<filename>.github/workflows/test.yml name: Test on: push: branches: - master pull_request: jobs: build: name: Build runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Install dependencies run: sudo apt-get install -y uuid-dev libjson-c-dev - name: CMake run: cmake . - name: Build run: make -j
language: java
jdk:
  - oraclejdk8

# NOTE(review): in the original, "script:" appeared as a list item nested
# under "install:", so "mvn verify" was never registered as the test-phase
# command. "script" must be a top-level key; "install: true" skips Travis's
# default "mvn install" so the build runs exactly once, in the script phase.
install: true
script:
  - mvn verify

cache:
  directories:
    - '$HOME/.m2/repository'

env:
  global:
    #CLOUDSTACK_API_KEY
    #CLOUDSTACK_SECRET_ACCESS_KEY
    - <KEY>
    - secure: <KEY>
<filename>.github/workflows/ci.yml name: CI on: [push, pull_request] jobs: build: runs-on: ubuntu-latest steps: - name: Check out repository code uses: actions/checkout@v2 - name: Set up Python uses: actions/setup-python@v2 with: python-version: "3.6" - name: Set up testing environment run: pip install -r test/requirements.txt - name: Run tests run: | source Setup.bsh cd test pytest
package: name: fw-wishbone-sram-ctrl version: None deps: - name: fwprotocol-defs url: https://github.com/Featherweight-IP/fwprotocol-defs.git type: raw dev-deps: - name: fwprotocol-defs url: https://github.com/Featherweight-IP/fwprotocol-defs.git type: raw - name: cocotb type: python src: pypi - name: assertpy type: python src: pypi - name: pybfms src: pypi - name: pybfms-wishbone url: https://github.com/pybfms/pybfms-wishbone.git - name: pybfms-generic-sram url: https://github.com/pybfms/pybfms-generic-sram.git - name: mkdv url: https://github.com/fvutils/mkdv.git
<gh_stars>1-10 --- algorithm: class: Nsga2 population_size: 1000 probabilities: crossover: 0.9 mutation: 0.01 injection: 0.9 shorten_individual: true init: method: grow # grow or full or ramped sensible_depth: 6 inject: method: grow # grow or full or random sensible_depth: 6 termination: max_steps: 1000 on_individual: stopping_condition grammar: class: Abnf::File filename: sample/pid/grammar.abnf mapper: class: DepthFirst wraps_to_fail: 3 track_support_on: true crossover: class: CrossoverLHS # margin: 2 # step: 2 mutation: class: MutationNodal store: class: Store filename: ./pid_nsga2.store report: class: PopulationReport individual: class: PipedIndividual shorten_chromozome: true _pareto: :error: minimize :complexity: minimize _pipe_output: - :error: to_f _thresholds: :error: 0.009 _mark_batch: "MARKER\n" _mark_phenotype: "\n" evaluator: class: WorkPipes commands: - 'ruby sample/pid/pipe.rb ONE sample/pid/data.csv' - 'ruby sample/pid/pipe.rb TWO sample/pid/data.csv'
name: Build and Deploy
on:
  push:
    branches:
      - main
  workflow_dispatch:

jobs:
  build-and-deploy:
    runs-on: ubuntu-latest
    steps:
      # NOTE(review): this action ref was mangled to "actions/[email protected]" by
      # a scrubbing pass; restored to the standard checkout action.
      - name: Checkout
        uses: actions/checkout@v2

      - name: Configure Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.9'

      # NOTE(review): the two "Install Dependencies" steps had duplicate
      # display names; renamed for distinguishable logs (commands unchanged).
      - name: Install LaTeX Dependencies
        run: |
          sudo apt-get update
          sudo apt-get install texlive-latex-base texlive-fonts-recommended texlive-fonts-extra texlive-latex-extra latexmk

      - name: Install Python Deps
        run: |
          python3 -m pip install ivpm

      - name: Fetch Project Dependencies
        run: |
          python3 -m ivpm update --anonymous-git

      - name: Install and build
        run: |
          make html pdf
          touch build/html/.nojekyll

      # NOTE(review): ref was mangled ("JamesIves/[email protected]"); the
      # branch/folder inputs match github-pages-deploy-action v4.
      - name: Deploy
        uses: JamesIves/github-pages-deploy-action@v4
        with:
          branch: gh-pages # The branch the action should deploy to.
          folder: build/html # The folder the action should deploy.

      - name: Build Wheel
        run: |
          python3 -m venv python
          ./python/bin/python3 -m pip install wheel twine
          ./python/bin/python3 -m pip install setuptools --upgrade
          export BUILD_NUM=$GITHUB_RUN_ID
          ./python/bin/python3 setup.py bdist_wheel --universal

      - name: Publish to PyPi
        if: startsWith(github.ref, 'refs/heads/main')
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          user: __token__
          password: ${{ secrets.PYPI_API_TOKEN }}
<reponame>slaclab/amc-carrier-core ############################################################################## ## This file is part of 'LCLS2 Common Carrier Core'. ## It is subject to the license terms in the LICENSE.txt file found in the ## top-level directory of this distribution and at: ## https://confluence.slac.stanford.edu/display/ppareg/LICENSE.html. ## No part of 'LCLS2 Common Carrier Core', including this file, ## may be copied, modified, propagated, or distributed except according to ## the terms contained in the LICENSE.txt file. ############################################################################## #schemaversion 3.0.0 #once AmcCarrierBsa.yaml #include BsaBufferControl.yaml #include BsaWaveformEngine.yaml #include BldAxiStream.yaml AmcCarrierBsa: &AmcCarrierBsa name: AmcCarrierBsa class: MMIODev configPrio: 1 description: AmcCarrier BSA Module size: 0x40000 ######### children: BsaBufferControl: <<: *BsaBufferControl at: offset: 0x00000000 BsaWaveformEngine: <<: *BsaWaveformEngine at: offset: 0x00010000 nelms: 2 stride: 0x00010000 BldAxiStream: <<: *BldAxiStream at: offset: 0x00030000
name: ibex description: Full ibex core test top_module: ibex_core tags: ibex path: third_party/cores/ibex command: fusesoc --cores-root third_party/cores/ibex run --target=lint --setup lowrisc:ibex:ibex_core:0.1 conf_file: build/lowrisc_ibex_ibex_core_0.1/lint-verilator/core-deps.mk test_file: ibex.sv timeout: 100
<gh_stars>10-100
# Copyright 2021 ETH Zurich and University of Bologna.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0

# Lint the design
name: lint

on: [push, pull_request]

jobs:

  #################
  # Check License #
  #################

  check-license:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install Python requirements
        run: pip install -r python-requirements.txt
      - name: Check license
        run: python scripts/license-checker.py --config scripts/license-checker.hjson

  ######################
  # Clang-Format Check #
  ######################

  # Check C/C++ files for correct formatting.
  clangfmt:
    name: C/C++ Sources
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      # NOTE(review): the action ref was mangled to "DoozyX/[email protected]" by a
      # scrubbing pass; v0.12 is the clang-format-lint-action release that
      # provides clangFormatVersion 12 used below.
      - uses: DoozyX/clang-format-lint-action@v0.12
        with:
          clangFormatVersion: 12

  ########################
  # Check Python Sources #
  ########################

  python:
    name: Python Sources
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install Python requirements
        run: pip install flake8
      # Check that all python sources conform to the `pep8` standard
      - name: Check Python sources
        run: |
          flake8

  #########################
  # Check Trailing Spaces #
  #########################

  check-trailing-whitespaces:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
      - name: Determine base commit
        run: |
          if [[ -n $GITHUB_BASE_REF ]]; then
            # Make sure we have the latest version of the target branch
            git fetch origin $GITHUB_BASE_REF
            echo "base=origin/$GITHUB_BASE_REF" >> $GITHUB_ENV
          else
            echo "base=HEAD~1" >> $GITHUB_ENV
          fi
      - name: Check for trailing whitespaces and tabs
        run: |
          git diff --check $base HEAD -- \
            ':(exclude)**.def' \
            ':(exclude)**.patch' \
            ':(exclude)toolchain/**' \
            ':(exclude)software/riscv-tests/**'
name: cgra_rtl_sim_run commands: - bash run_rtl_testbench.sh inputs: - xcelium.d - libcgra.so - meta parameters: waveform: False cgra_apps: ["tests/conv_3_3", "apps/cascade"]
# Any language can be specified, the ghdl scripts do not depend on any of them. # (Default language is ruby on travis) language: python # Specify, that we want to use docker in the docker :) services: - docker script: - docker run -t -v `pwd`:/mnt/data ghdl/ghdl:buster-mcode /bin/sh -c "ghdl --version; ghdl -a /mnt/data/project.vhd;" # Notification is optional notifications: email: on_success: never on_failure: always
<reponame>hito0512/Vitis-AI # Copyright 2019 Xilinx Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. description: face quality model. This model outputs a face quality score, which is usually used to filter low-quality faces to further improve face recognition accuracy. input size: 80*60 float ops: 61.68M task: face quality framework: pytorch prune: 'no' version: 2.0 files: - name: pt_face-quality_80_60_61.68M_2.0 type: float & quantized board: GPU download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_face-quality_80_60_61.68M_2.0.zip checksum: aec1ed6973c9736ec284ed5e83dfca5d - name: face-quality_pt type: xmodel board: zcu102 & zcu104 & kv260 download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz checksum: d2179b4a19270154248e067b8277576a - name: face-quality_pt type: xmodel board: vck190 download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality_pt-vck190-r2.0.0.tar.gz checksum: 29182515f37ae823a33deeb79a70818e - name: face-quality_pt type: xmodel board: vck50006pe-DPUCVDX8H-DWC download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality_pt-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz checksum: c3e418d592c8b60585c1e802dae461cb - name: face-quality_pt type: xmodel board: vck50008pe-DPUCVDX8H download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality_pt-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz checksum: 
ffdf4629a18d90f26cf9966c02f8a6ea - name: face-quality_pt type: xmodel board: u50lv-DPUCAHX8H download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality_pt-u50lv-DPUCAHX8H-r2.0.0.tar.gz checksum: 6bb6e1e7478a93d467241fa71f220bb4 - name: face-quality_pt type: xmodel board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC download link: https://www.xilinx.com/bin/public/openDownload?filename=face-quality_pt-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz checksum: 3769e67371c5ed8c2bec0510b1d88a7e license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
# Copyright lowRISC contributors. # Licensed under the Apache License, Version 2.0, see LICENSE for details. # SPDX-License-Identifier: Apache-2.0 # Azure template for uploading artifacts to a Google Cloud Platform (GCP) # bucket. # # This template first installs gsutil to interact with GCP resources. Then, # files located under parentDir and at paths specified by includeFiles will be # packed into archiveName (a tar.gz file) and uploaded to a GCP bucket located # at bucketURI using gsutil. # # Writing to a GCP bucket requires a GCP service account key with sufficient # permisions. This key must be uploaded to Azure as a Secure File. The name of # the key file should be provided as gcpKeyFile. # parameters: - name: parentDir type: string default: "" - name: includeFiles type: object default: [] - name: archiveName type: string default: "" - name: gcpKeyFile type: string default: "" - name: bucketURI type: string default: "" steps: - task: DownloadSecureFile@1 name: gcpkey inputs: secureFile: ${{ parameters.gcpKeyFile }} - bash: | echo "Installing gsutil" sudo apt-get install -y apt-transport-https ca-certificates gnupg echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] https://packages.cloud.google.com/apt cloud-sdk main" | \ sudo tee /etc/apt/sources.list.d/google-cloud-sdk.list echo "vvvvvvvvv cat /etc/apt/sources.list.d/google-cloud-sdk.list" cat /etc/apt/sources.list.d/google-cloud-sdk.list echo "^^^^^^^^" curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo tee /usr/share/keyrings/cloud.google.gpg sudo apt-get update || { error "Failed to run apt-get update" } sudo apt-get install -y google-cloud-cli . 
util/build_consts.sh printf "$(date -u +%Y-%m-%dT%H:%M:%S)\n$(Build.SourceVersion)" > latest.txt printf "${{ join('\n', parameters.includeFiles) }}" > include_files.txt tar -C ${{ parameters.parentDir }} -zcvf ${{ parameters.archiveName }} -T include_files.txt gsutil -o Credentials:gs_service_key_file=$(gcpkey.secureFilePath) \ cp latest.txt ${{ parameters.bucketURI }}/latest.txt gsutil -o Credentials:gs_service_key_file=$(gcpkey.secureFilePath) \ cp -r ${{ parameters.archiveName }} ${{ parameters.bucketURI }}/${{ parameters.archiveName }} condition: succeeded() displayName: Upload artifacts to GCP bucket
digital_ctrl_inputs: a_in: abspath: 'tb_i.a_in' width: 8 init_value: 0 b_in: abspath: 'tb_i.b_in' width: 8 init_value: 0 mode_in: abspath: 'tb_i.mode_in' width: 8 init_value: 0 digital_ctrl_outputs: c_out: abspath: 'tb_i.c_out' width: 8
<filename>models/AI-Model-Zoo/model-list/dk_yolov3_bdd_288_512_53.7G_2.0/model.yaml # Copyright 2019 Xilinx Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. description: yolov3 detection on bdd dataset. input size: 288*512 float ops: 53.7G task: detection framework: darknet prune: 'no' version: 2.0 files: - name: dk_yolov3_bdd_288_512_53.7G_2.0 type: float & quantized board: GPU download link: https://www.xilinx.com/bin/public/openDownload?filename=dk_yolov3_bdd_288_512_53.7G_2.0.zip checksum: a5b338675fbee02fdfb04fa8d6480eab - name: yolov3_bdd type: xmodel board: zcu102 & zcu104 & kv260 download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_bdd-zcu102_zcu104_kv260-r2.0.0.tar.gz checksum: 3be8858ea107192204390d2cd6aa3c6d - name: yolov3_bdd type: xmodel board: vck190 download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_bdd-vck190-r2.0.0.tar.gz checksum: 5afb513ddfa1b196f68e2fe30b18b944 - name: yolov3_bdd type: xmodel board: vck50006pe-DPUCVDX8H-DWC download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_bdd-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz checksum: 3670776da9163f7e8940195d9c8e4227 - name: yolov3_bdd type: xmodel board: vck50008pe-DPUCVDX8H download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_bdd-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz checksum: 0f902386b34e69d98d94031f08409409 - name: yolov3_bdd type: xmodel board: u50lv-DPUCAHX8H download link: 
https://www.xilinx.com/bin/public/openDownload?filename=yolov3_bdd-u50lv-DPUCAHX8H-r2.0.0.tar.gz checksum: d47589aa643f9724d49367e8d83b5cef - name: yolov3_bdd type: xmodel board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_bdd-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz checksum: c3f984403657a4db43d3b17bb48ce3ec license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
BUTTON_RST1: IP: __BUTTON_RST CMD: BUP ALT: a button_out_status: PIN: A34 DIRECTION: OUT BUTTON_RST2: IP: __BUTTON_RST CMD: BDP ALT: b button_out_status: PIN: A34 DIRECTION: OUT
pulp: incdirs: [ ../includes, ] files: [ jtag_tap_top.sv, pad_control.sv, cluster_domain.sv, safe_domain.sv, soc_domain.sv, rtc_date.sv, rtc_clock.sv, pad_frame.sv, pulp.sv, ]
# @package _global_ do_blink: backend: vivado figure: fig1 sub_figure: a device: 7020 part: xc7z020clg400-1 num_luts: 2400 bft: bft8
name: Autogenerate Docs on: push: branches: [ master ] jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 with: submodules: "recursive" - name: Setup Python uses: actions/setup-python@v1 with: python-version: '3.7' architecture: 'x64' - name: Install dependencies run: | python3 -m pip install --upgrade pip # install pip python3 -m pip install mkdocs # install mkdocs python3 -m pip install mkdocs-material # install material theme - name: Setup Zig uses: goto-bus-stop/setup-zig@v1 with: version: master - name: Compile wasm emulator run: | mkdir -p zig-out/bin mkdir -p zig-out/firmware zig build wasm - name: Render site run: mkdocs build - name: Add static files run: cp -r website/* website-out/livedemo/ - name: Add emulator run: cp zig-out/lib/emulator.wasm website-out/livedemo/emulator.wasm - name: Deploy to Server uses: easingthemes/[email protected] env: SSH_PRIVATE_KEY: ${{ secrets.DEPLOY_KEY }} ARGS: "-rltgoDzvO --delete" SOURCE: "website-out/" REMOTE_HOST: ${{ secrets.DEPLOY_HOST }} REMOTE_USER: ${{ secrets.DEPLOY_USERNAME }} TARGET: "/home/${{ secrets.DEPLOY_USERNAME }}/website"
apb_interrupt_cntrl: incdirs: [ ] files: [ apb_interrupt_cntrl.sv, ] jg_slint_top_name: [ apb_interrupt_cntrl ] jg_slint_elab_opt: [ ] jg_slint_postelab_cmds: [ ] jg_slint_clocks: [ clk_i, ] jg_slint_resets: [ ~rst_ni, ]
rtsim_test: before_script: - cd rtsim && make clean && make stage: test script: - make clean && make checks && make rtsim.dat cmoc_test: before_script: - cd cmoc && export PYTHONPATH=../build-tools/ stage: test script: - make && make checks cmoc_top_marblemini: before_script: - cd projects/cmoc_top/marblemini && ls /non-free stage: synthesis script: - XILINX_VIVADO=$XILINX_VIVADO PATH=$XILINX_VIVADO/bin:$PATH make HARDWARE=marblemini cmoc_top.bit artifacts: name: "$CI_JOB_NAME-$CI_COMMIT_REF_NAME" expire_in: 1 week paths: - projects/cmoc_top/marblemini/cmoc_top.bit
name: Python on: [push] jobs: format: name: Python Linting runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v2 - name: Fud Formatting check uses: RojerGS/python-black-check@master with: line-length: 88 path: 'fud' - name: Systolic Array Formatting check uses: RojerGS/python-black-check@master with: line-length: 88 path: 'frontends/systolic-lang' - name: Fud Linting check uses: TrueBrain/actions-flake8@master with: max_line_length: 88 path: 'fud' - name: Systolic Array Linting check uses: TrueBrain/actions-flake8@master with: max_line_length: 88 path: 'frontends/systolic-lang'
# Human readable task name name: Filestore # Long form description. description: |+ We stored our flag on this platform, but forgot to save the id. Can you help us restore it? # The flag flag: CTF{CR1M3_0f_d3dup1ic4ti0n} # Task category. (one of hw, crypto, pwn, rev, web, net, misc) category: misc # === the fields below will be filled by SRE or automation === # Task label label: '' # URL for web challenges link: '' # host/port for non-web challenges host: 'filestore.2021.ctfcompetition.com 1337' # the URL for attachments, to be filled in by automation attachment: '' # is this challenge released? Will be set by SREs visible: false
name: svlint on: [push, pull_request] jobs: svlint: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Run svlint uses: dalance/svlint-action@v1 with: filelists: | compile.f env: RGGEN_SV_RTL_ROOT: ${{ github.workspace }}
sudo: required language: c services: - docker # Don't pull the compiler and Verilator sources (which aren't built here and # are large) git: submodules: false # Pull a container from DockerHub that has Verilator and the Nyuzi toolchain. # It is created using build/Dockerfile and build/build-container.sh. before_install: - docker pull jeffbush001/nyuzi-build # Mount the source directory in the container and set that to be the working # directory. Then launch the script scripts/run_ci_tests.sh from this repository, # which will do the rest. script: - docker run -v $TRAVIS_BUILD_DIR:/root/NyuziProcessor -w /root/NyuziProcessor jeffbush001/nyuzi-build /bin/bash -c scripts/run_ci_tests.sh
dist: xenial language: python python: - '3.6.9' - '3.7' install: - pip install -r requirements.txt script: - "/bin/bash -c ci/run.sh" deploy: provider: pypi user: __token__ password: <KEY> distributions: sdist bdist_wheel skip_existing: true skip_cleanup: true on: tags: true branches: only: - master
name: sim-run parameters: waveform: False tool: "XCELIUM" rtl_testvectors: ["test1", "test2", "test3", "test4"] commands: - bash run_sim.sh outputs: - run.log postconditions: - assert File( 'outputs/run.log' ) # must exist - assert 'Error,' not in File( 'outputs/run.log' ) - assert '*E,' not in File( 'outputs/run.log' )
name: Simulations Checker on: push: branches: [ main ] pull_request: branches: [ main ] jobs: build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: apt update run: sudo apt update - name: Install apt packages working-directory: ./tests run: xargs -a apt_requirements.txt sudo apt-get install -y - name: Install Python dependencies uses: py-actions/py-dependency-install@v3 with: path: "tests/pip_requirements.txt" update-pip: "false" update-setuptools: "false" update-wheel: "false" - name: Run tests working-directory: ./tests run: ./run_all_tests.sh --fast-fail
# .readthedocs.yaml # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details version: 2 formats: - pdf sphinx: configuration: docs/source/conf.py python: install: - requirements: docs/source/requirements.txt
<reponame>iicarus-bit/google-ctf<filename>infrastructure/kctf/samples/xss-bot/k8s/containers.yaml apiVersion: "apps/v1" kind: "Deployment" metadata: name: "xss-bot" namespace: "default" labels: app: "xss-bot" spec: replicas: 1 selector: matchLabels: app: "xss-bot" template: metadata: labels: app: "xss-bot" annotations: container.apparmor.security.beta.kubernetes.io/challenge: localhost/ctf-profile spec: containers: - name: "challenge" # this will be updated to eu.gcr.io/$project/$challenge-dir using kustomization.yaml in the kctf-conf image: "xss-bot" ports: - containerPort: 1337 securityContext: capabilities: add: ["SYS_ADMIN"] readOnlyRootFilesystem: true command: resources: limits: cpu: "0.9" requests: cpu: "0.45" volumeMounts: - name: "secrets" mountPath: "/secrets" readOnly: true - name: "config" mountPath: "/config" readOnly: true - name: "tmp" mountPath: "/tmp" livenessProbe: failureThreshold: 2 httpGet: path: /healthz port: 8080 initialDelaySeconds: 20 timeoutSeconds: 3 periodSeconds: 30 - name: "healthcheck" image: "xss-bot-healthcheck" command: resources: limits: cpu: "1" requests: cpu: "0.05" volumeMounts: - name: "healthcheck-secrets" mountPath: "/secrets" readOnly: true - name: "healthcheck-exploit-key" mountPath: "/keys" readOnly: true - name: "healthcheck-config" mountPath: "/config" readOnly: true volumes: - name: "secrets" secret: # must be called ${challenge-dir}-secrets secretName: "xss-bot-secrets" defaultMode: 0444 - name: "config" configMap: # must be called ${challenge-dir}-config name: "xss-bot-config" - name: "healthcheck-secrets" secret: # must be called ${challenge-dir}-healthcheck-secrets secretName: "xss-bot-healthcheck-secrets" defaultMode: 0444 - name: "healthcheck-exploit-key" secret: # must be called ${challenge-dir}-healthcheck-exploit-key secretName: "xss-bot-healthcheck-exploit-key" defaultMode: 0444 - name: "healthcheck-config" configMap: # must be called ${challenge-dir}-healthcheck-config name: "xss-bot-healthcheck-config" - 
name: "tmp" emptyDir: medium: "Memory"
package: name: apb_node authors: ["<NAME> <<EMAIL>>", "<NAME> <<EMAIL>>"] dependencies: apb: { git: "https://github.com/pulp-platform/apb.git", version: 0.1.0 } sources: - src/apb_node.sv - src/apb_node_wrap.sv
spring: datasource: url: jdbc:postgresql://localhost:5432/ghostflow username: ghostflow driver-class-name: org.postgresql.Driver config: location: ~/ main: banner-mode: "off" constants: security: secret: LJNL67KCFB92L642j7h expirationTime: 864000000 tokenPrefix: 'Bearer ' header: 'Authorization'
# pre-flowsetup # To get e.g. icovl-cells into iflow, must do the following: # - create new step 'pre-flowsetup' whose outputs are icovl cells # -- link via "command" in pre-iflow/configure.yml # - connect pre-flowsetup step to flowsetup (iflow) step # - extend iflow inputs to include icovl cells # - iflow "setup.tcl" automatically includes "inputs/*.lef" name: pre-flowsetup inputs: - adk outputs: - icovl-cells.lef - dtcd-cells.lef - bumpcells.lef commands: - cd outputs - cp -p ../inputs/adk/icovl-cells.lef . - cp -p ../inputs/adk/dtcd-cells.lef . - cp -p ../inputs/adk/bumpcells.lef .
--- algorithm: class: Alps population_size: 7000 elite_size: 50 max_layers: 12 aging_scheme: :fibonacci age_gap: 20 duplicate_elimination: true layer_diagnostic: false probabilities: crossover: 0.9 mutation: 0.01 init: method: grow # grow or full or random or ramped random_length: 8 sensible_depth: 6 termination: # max_steps: 2000 on_individual: :stopping_condition grammar: class: Abnf::File filename: sample/pid/grammar.abnf mapper: class: DepthLocus wraps_to_fail: 3 track_support_on: true selection: class: Tournament tournament_size: 3 selection_rank: class: Ranking elite_rank: class: Ranking order_by: :error #optional direction: :minimize #optional crossover: class: CrossoverLHS mutation: class: MutationNodal store: class: Store filename: ./pid_alps.store report: class: PopulationReport report_diversity: false report_statistics: true report_histogram: false individual: class: PipedIndividual shorten_chromozome: true _pareto: :error: minimize :complexity: minimize _pipe_output: - :error: to_f _thresholds: :error: 0.009 _mark_batch: "MARKER\n" _mark_phenotype: "\n" evaluator: class: WorkPipes commands: - 'ruby sample/pid/pipe.rb ONE sample/pid/data.csv' - 'ruby sample/pid/pipe.rb TWO sample/pid/data.csv'