# repo: ikwzm/merge_sorter
---
input_file : 03_word_compare.akd
output_file : ../03_word_compare.md
image_url :
"Fig.1 Word Compare" : "image/03_word_compare_1.jpg"
link_list :
- id : "「はじめに」"
title: "「VHDL で書くマージソーター(はじめに)」"
url : "./01_introduction.md"
- id : "「ワードの定義」"
title: "「VHDL で書くマージソーター(ワードの定義)」"
url : "./02_word_package.md"
- id : "「ワード比較器」"
title: "「VHDL で書くマージソーター(ワード比較器)」"
url : "./03_word_compare.md"
- id : "「ソーティングネットワーク」"
title: "「VHDL で書くマージソーター(ソーティングネットワーク)」"
url : "./04_sorting_network.md"
- id : "「バイトニックマージソート」"
title: "「VHDL で書くマージソーター(バイトニックマージソート)」"
url : "./05_bitonic_sorter.md"
- id : "「バッチャー奇偶マージソート」"
title: "「VHDL で書くマージソーター(バッチャー奇偶マージソート)」"
url : "./06_oddeven_sorter.md"
- id : "「シングルワード マージソート ノード」"
title: "「VHDL で書くマージソーター(シングルワード マージソート ノード)」"
url : "./07_merge_sort_node_single.md"
- id : "「マルチワード マージソート ノード」"
title: "「VHDL で書くマージソーター(マルチワード マージソート ノード)」"
url : "./08_merge_sort_node_multi.md"
- id : "「マージソート ツリー」"
title: "「VHDL で書くマージソーター(マージソート ツリー)」"
url : "./09_merge_sort_tree.md"
- id : "「端数ワード処理」"
title: "「VHDL で書くマージソーター(端数ワード処理)」"
url : "./10_merge_sort_core_1.md"
- id : "「ストリーム入力」"
title: "「VHDL で書くマージソーター(ストリーム入力)」"
url : "./11_merge_sort_core_2.md"
- id : "「ストリームフィードバック」"
title: "「VHDL で書くマージソーター(ストリームフィードバック)」"
url : "./12_merge_sort_core_3.md"
- id : "「ArgSort IP」"
title: "「VHDL で書くマージソーター(ArgSort IP)」"
url : "./13_argsort.md"
- id : "「ArgSort-Ultra96」"
title: "「VHDL で書くマージソーター(ArgSort-Ultra96)」"
url : "https://github.com/ikwzm/ArgSort-Ultra96/blob/1.2.1/doc/ja/argsort-ultra96.md"
- id : "「ArgSort-Kv260」"
title: "「VHDL で書くマージソーター(ArgSort-Kv260)」"
url : "https://github.com/ikwzm/ArgSort-Kv260/blob/1.2.1/doc/ja/argsort-Kv260.md"
---
|
# repo: accuminium/axi
name: Build and deploy documentation
on:
push:
branches-ignore:
- gh-pages # deployment target branch (this workflow should not exist on that branch anyway)
- v** # such branch names conflict with tags
tags:
- v**
pull_request:
branches-ignore:
- gh-pages # deployment target branch (this workflow should not exist on that branch anyway)
- v** # such branch names conflict with tags
jobs:
build-and-deploy:
if: github.repository == 'pulp-platform/axi' # do not run this job on forks (because deployment
runs-on: ubuntu-latest # will fail)
steps:
- name: Checkout
uses: actions/checkout@v2
with:
persist-credentials: false
- name: Cache cargo registry
uses: actions/cache@v1
with:
path: ~/.cargo/registry
key: ubuntu-latest-cargo-registry-${{ hashFiles('.github/workflows/doc.yml') }}
- name: Cache cargo index
uses: actions/cache@v1
with:
path: ~/.cargo/git
key: ubuntu-latest-cargo-index-${{ hashFiles('.github/workflows/doc.yml') }}
- name: Cache cargo binaries
uses: actions/cache@v1
with:
path: ~/.cargo/bin
key: ubuntu-latest-cargo-binaries-${{ hashFiles('.github/workflows/doc.yml') }}
- name: Install Bender and Morty
run: |
rustup update stable --no-self-update && rustup default stable
if ! command -v bender >/dev/null 2>&1; then
cargo install bender --version 0.18.0
fi
if ! command -v morty >/dev/null 2>&1; then
cargo install --git https://github.com/zarubaf/morty --rev 4855119c1378d45d9ac35cfa525725d2786e68f3
fi
shell: bash
- name: Build documentation
run: |
mkdir -p docs
morty -I include -I $(bender path common_cells)/include src/*.sv -d docs
shell: bash
- name: Determine documentation target folder
run: |
if [ "$GITHUB_EVENT_NAME" == "pull_request" ]; then
DOC_TARGET="$GITHUB_HEAD_REF"
elif [ "$GITHUB_EVENT_NAME" == "push" ]; then
if echo $GITHUB_REF | grep -qE '^refs/(head|tag)s'; then
DOC_TARGET="$(echo $GITHUB_REF | cut -d '/' -f3-)"
else
echo "Error: Could not derive documentation target folder for ref '$GITHUB_REF'!"
exit 1
fi
else
echo "Error: Unsupported event: '$GITHUB_EVENT_NAME'!"
exit 1
fi
echo "DOC_TARGET=$DOC_TARGET" >> $GITHUB_ENV
- name: Deploy documentation
uses: JamesIves/github-pages-deploy-action@releases/v3
with:
ACCESS_TOKEN: ${{ secrets.ACCESS_TOKEN }}
BRANCH: gh-pages # The branch the action should deploy to.
FOLDER: docs # The folder the action should deploy.
TARGET_FOLDER: ${{ env.DOC_TARGET }}
CLEAN: true # remove files from `TARGET_FOLDER` that are not in `FOLDER`
# (`rsync --delete`)
|
# file: testing/snort/execution/cfgs/snort_cfg.yml
ssh:
user: ncshy
key: '~/.ssh/id_rsa_tofino'
port: 22
params:
rate: '{0.args.rate}'
pktgen_template: ./pktgen_template.lua
n_set_pkts: 5000
n_get_pkts: 45000
n_total_pkts: 10000
key_space: 10000
zipf_coef: 1.00
dirs:
home: /home/{0.ssh.user}
P4Boosters: '{0.dirs.home}/source/P4Boosters'
snort: '{0.dirs.P4Boosters}/testing/snort/'
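# Note: the '{0.<attr>}' placeholders in this file appear to be Python
# str.format references into the config object itself (an assumption about the
# test harness, not confirmed by this file alone); e.g. '{0.dirs.home}' would
# expand to '/home/ncshy' given the ssh.user defined above.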
init_cmds:
make_lua: >-
python format_lua_template.py {0.params.pktgen_template} {0.files.pktgen_script.src}
--rate {0.params.rate} --log {0.programs.log_dir}/pktgen.log --count {0.params.n_total_pkts}
post_cmds:
verify_counters: >-
python verify_counters.py {0.out}/moongen/moongen.out {0.out}/counters_0.out {0.out}/counters_1.out
files:
pktgen_script:
src: '{0.out}/pktgen.lua'
dst: '{0.programs.log_dir}/pktgen.lua'
host: pktgen
pktgen_runner:
src: './run_lua.sh'
dst: '~/source/dpdk/run_lua.sh'
host: pktgen
pcap:
src: 'pcaps/{0.args.pcap_file}'
dst: '{0.programs.log_dir}/input.pcap'
host: pktgen
hosts:
pktgen:
addr: 172.16.31.10
mcd:
addr: 192.168.3.11
moongen:
addr: 172.16.17.32
encoder:
addr: dcomp1.seas.upenn.edu
port: 112
tofino:
addr: 192.168.3.11
ssh:
user: fpadmin
key: '~/.ssh/id_rsa_tofino'
port: 22
programs:
log_dir: ~/logs/{0.label}
start_dataplane:
host: tofino
start: cd ~/gits/TofinoP4Boosters/pointToPoint && ./run.sh 1-112
stop: cd ~/gits/TofinoP4Boosters/pointToPoint && ./stop.sh
enforce_duration: false
stop_dataplane:
host: tofino
start: cd ~/gits/TofinoP4Boosters/pointToPoint && ./stop.sh
check_rtn: 1
snort:
host: encoder
start: 'snort --perfmon-file {log} -q -u root -g ncshy -c /etc/snort/snort.conf -i eno1'
stop: sh -c 'pgrep snort | xargs kill -9'
log:
dir: snort_log
log: snort.stats
out: snort.out
err: snort.err
tcpreplay:
host: pktgen
start: '~/source/dpdk/tcpreplay --loop {0.args.loop} -p {0.args.tcpreplay_rate} -i ens1f1 {pcap}'
log:
dir: tcpreplay
out: tcpreplay.out
err: tcpreplay.err
pktgen:
host: pktgen
start: tmux new -d '~/source/dpdk/run_lua.sh {lua} {pcap}'
enforce_duration: false
log:
out: pktgen.out
err: pktgen.err
get_counters:
host: tofino
start: >-
tmux send ucli ENTER && sleep 1 &&
tmux send pm ENTER && sleep 1 &&
tmux send show ENTER && sleep 1 &&
tmux capture-pane -J -p
log:
out: counters_{i}.out
err: counters_{i}.err
commands:
stop_dataplane:
begin: -30
start_dataplane:
begin: -20
duration: 50
snort:
begin: -10
duration: 40
tcpreplay:
begin: 0
pcap: '{0.files.pcap.dst}'
duration: 30
|
# file: scripts/fox_hoplite_multicast/FirmwareConfig.yaml
---
foxFirmware:
name: firmware_fox_hoplite_multicast
memory_size: 3072
resultFirmware:
name: firmware_fox_hoplite_multicast_result
memory_size: 4096
|
name: PoC Default
on:
push:
paths:
- '.github/workflows/poc_default.yml'
- 'poc/**'
- '!poc/README.md'
defaults:
run:
working-directory: poc
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Use Node.js 12.x
uses: actions/setup-node@v1
with:
node-version: 12.x
- name: Install dependencies
run: npm ci
- name: Test building the application
run: npm run build
- name: Run the unit tests
run: npm test
|
# Human readable task name
name: Cpp
# Long form description.
description: |+
We have this program's source code, but it uses a strange DRM solution. Can you crack it?
# The flag
flag: CTF{pr3pr0cess0r_pr0fe5sor}
# Task category. (one of hw, crypto, pwn, rev, web, net, misc)
category: reversing
# === the fields below will be filled by SRE or automation ===
# Task label
label: ''
# URL for web challenges
link: ''
# host/port for non-web challenges
host: ''
# the URL for attachments, to be filled in by automation
attachment: ''
# is this challenge released? Will be set by SREs
visible: false
|
package:
name: apb
authors:
- "<NAME> <<EMAIL>>" # current maintainer
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
dependencies:
common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.16.2 }
export_include_dirs:
- include
sources:
# Source files grouped in levels. Files in level 0 have no dependencies on files in this
# package. Files in level 1 only depend on files in level 0, files in level 2 on files in
# levels 1 and 0, etc. Files within a level are ordered alphabetically.
# Level 0
- src/apb_pkg.sv
# Level 1
- src/apb_intf.sv
# Level 2
- src/apb_regs.sv
- target: simulation
files:
- src/apb_test.sv
- target: test
files:
- test/tb_apb_regs.sv
- target: synth_test
files:
# Level 0
- test/synth_bench.sv
|
# Copyright 2021 ETH Zurich and University of Bologna.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
package:
name: tcdm_adapter
dependencies:
common_cells: { path: "../../deps/common_cells" }
axi: { path: "../../deps/axi" }
snitch: { path: "../../deps/snitch" }
sources:
# Level 0
- ../../src/mempool_pkg.sv
- ../../src/tcdm_adapter.sv
- target: tcdm_vsim
files:
# Level 1
- tcdm_adapter_tb.sv
|
name: Update Specification
on: [push]
jobs:
latex-job:
runs-on: ubuntu-latest
name: Specification Document Compilation
steps:
- uses: actions/checkout@v1
- name: Compilation
uses: vinay0410/tectonic-action@master
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tex_path: "tex/documentation/documentation.tex"
push: "no"
- name: Upload Specification
uses: actions/upload-artifact@v1
with:
name: documentation.pdf
path: tex/documentation/documentation.pdf
|
# file: documentation/metadata/ashet.yaml
---
title: Ashet Hardware Architecture
author:
- Felix "xq" Queißner
date: Jan 19, 2021
abstract: The document describes the system architecture of the Ashet Home Computer.
|
# repo: mfkiwl/snitch
package:
name: apb_timer
authors: ["<NAME> <<EMAIL>>"]
sources:
# Level 0
- src/timer.sv
# Level 1
- src/apb_timer.sv
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: Coverage prediction on Endoscopy Video.
input size: 512*512
float ops: 6.86G
task: classification
framework: pytorch
prune: None
version: 2.0
files:
- name: pt_C2D2lite_CC20_512_512_6.86G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_C2D2lite_CC20_512_512_6.86G_2.0.zip
checksum: 7ebb202231559a2fb765b316b9d0d829
- name: C2D2_Lite_0_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=C2D2_Lite_0_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: e3b9f6eea4a048118a9024cd0d172fc2
- name: C2D2_Lite_1_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=C2D2_Lite_1_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: 34bfd73a7fc7167eb061d522dc9563f8
- name: C2D2_Lite_0_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=C2D2_Lite_0_pt-vck190-r2.0.0.tar.gz
checksum: 87b578a1006fee78d36db1c312c83b65
- name: C2D2_Lite_1_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=C2D2_Lite_1_pt-vck190-r2.0.0.tar.gz
checksum: d1577146a8587d82acef22f912202964
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
---
# Copyright 2021 Datum Technology Corporation
# SPDX-License-Identifier: Apache-2.0 WITH SHL-2.1
########################################################################################################################
# Licensed under the Solderpad Hardware License v 2.1 (the "License"); you may not use this file except in compliance
# with the License, or, at your option, the Apache License version 2.0. You may obtain a copy of the License at
# https://solderpad.org/licenses/SHL-2.1/
# Unless required by applicable law or agreed to in writing, any work distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
########################################################################################################################
# Sample file describing the results of a simulation. This file would be found in the same directory as the simulation
# logs.
summary:
project: my_project
ip: @my_scope/[email protected]
test-name: my_test
seed: 123213213
label: null
passed: False
fatals: 0
errors: 2
warnings: 4
infos: 402
arguments:
ip: ['dp=False', 'dp-width=32B']
sim: ['+NUM_PKTS=50']
custom-user-content:
my-field-a: "asdsadsa"
my-field-b: 232
duration:
timestamp-start: 2021-06-03
timestamp-end: 2021-06-03
simulation-time: 1_560_054.293ns
steps:
library-creation-successful: True
compilation-successful: True
elaboration-successful: True
fatals: []
errors:
- { time: "1_204_054.293ns", id: "123", msg: "", component: "" }
- { time: "1_204_054.293ns", id: "123", msg: "", component: "" }
warnings:
- { time: "1_204_054.293ns", id: "123", msg: "", component: "" }
- { time: "1_204_054.293ns", id: "123", msg: "", component: "" }
- { time: "1_204_054.293ns", id: "123", msg: "", component: "" }
- { time: "1_204_054.293ns", id: "123", msg: "", component: "" }
files:
count: 32
size: 45MB
compressed: False
results-path: "./sim/results/my_ip/my_test__123213213/"
mlist: "./sim.mlist"
compilation-log: "./compilation.log"
elaboration-log: "./elaboration.log"
simulation-log: "./simulation.log"
waveforms: "./waves.wdb"
coverage-db: "./cov.ucdb"
additional-logs: ["./trn_log/*.log"]
simulator:
vendor: synopsys
name: vcs
version: 2018.0.1
path: "/tools/simulators/vcs/2018.0.1/"
|
comms_top:
before_script:
- cd projects/comms_top && ls /non-free
stage: synthesis
script:
- PATH=$XILINX_VIVADO/bin:$PATH make comms_top.bit
artifacts:
name: "$CI_JOB_NAME-$CI_COMMIT_REF_NAME"
expire_in: 1 week
paths:
- projects/comms_top/comms_top.bit
comms_top_run:
stage: program
tags:
- deploy
dependencies:
- comms_top
script:
- cd projects/comms_top
- sh qf2_070_3_setup.sh
- make QF2_TOOLS=qf2_users QF2_IP=192.168.1.30 hwload && sleep 8 && make hwtest
gige_sfp_ac701:
before_script:
- cd projects/comms_top/gige_eth && ls /non-free
stage: synthesis
script:
- PATH=$XILINX_VIVADO/bin:$PATH make HARDWARE=ac701 gige_top.bit
artifacts:
name: "$CI_JOB_NAME-$CI_COMMIT_REF_NAME"
expire_in: 1 week
paths:
- projects/comms_top/gige_eth/gige_top.bit
gige_sfp_ac701_run:
stage: program
tags:
- deploy
dependencies:
- gige_sfp_ac701
script:
- cd projects/comms_top/gige_eth && make hwload_ac701 && make hwtest_ac701
|
language: python
python:
- 2.7
- 3.7
sudo: required
dist: xenial
notifications:
slack: silab-bonn:nU3pnaHN3RJ6WRFrfjec9vPM
email:
- <EMAIL>
- <EMAIL>
- <EMAIL>
before_install:
# Install cocotb and iverilog for interface test
# - git clone https://github.com/potentialventures/cocotb
# - export COCOTB=$(pwd)/cocotb
# Build iverilog from source
- sudo apt-get install gperf
- git clone https://github.com/steveicarus/iverilog.git
- cd iverilog && autoconf && ./configure && make && sudo make install && cd ..
# Download iverilog from PPA
# - sudo add-apt-repository -y ppa:team-electronics/ppa
# - sudo apt-get -q update
# - sudo apt-get -q install iverilog-daily
# Download iverilog from conda-forge
# - conda config --add channels conda-forge
# - conda install iverilog
install:
# conda
- wget http://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh
- bash miniconda.sh -b -p $HOME/miniconda
- export PATH="$HOME/miniconda/bin:$PATH"
- conda update --yes conda
- conda update --yes --all
- conda info -a
- conda create --yes -n test-environment python=$TRAVIS_PYTHON_VERSION numpy bitarray nose pyyaml coverage six
- source activate test-environment
- pip install pyvisa pyvisa-sim coveralls cocotb==1.0.dev3
# install basil
- python setup.py develop
script:
- nosetests --with-coverage --cover-package=basil tests/test_*.py examples/*/*/test_*.py
after_success:
- coveralls
deploy:
provider: pypi
user: davidlp
password:
secure: E<KEY>
on:
tags: true
repo: SiLab-Bonn/basil
distributions: sdist bdist_wheel
|
# file: hw/ip/snitch_cluster/Bender.yml
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
name: snitch_cluster
authors:
- <NAME> <<EMAIL>>
- <NAME> <<EMAIL>>
dependencies:
axi: {path: ../../vendor/pulp_platform_axi}
common_cells: {path: ../../vendor/pulp_platform_common_cells}
fpnew: {path: ../../vendor/pulp_platform_fpnew}
register_interface: {path: ../../vendor/pulp_platform_register_interface}
riscv-dbg: {path: ../../vendor/pulp_platform_riscv_dbg}
tech_cells_generic: {path: ../../vendor/pulp_platform_tech_cells_generic}
# Local dependencies.
future: {path: ../../ip/future}
reqrsp_interface: {path: ../../ip/reqrsp_interface}
mem_interface: {path: ../../ip/mem_interface}
tcdm_interface: {path: ../../ip/tcdm_interface}
snitch_dma: {path: ../../ip/snitch_dma}
snitch_cluster_dma: {path: ../../ip/snitch_cluster_dma}
snitch_icache: {path: ../../ip/snitch_icache}
snitch_ipu: {path: ../../ip/snitch_ipu}
snitch_vm: {path: ../../ip/snitch_vm}
snitch_ssr: {path: ../../ip/snitch_ssr}
snitch: {path: ../../ip/snitch}
sources:
# Level 0:
- src/snitch_amo_shim.sv
- src/snitch_cluster_peripheral/snitch_cluster_peripheral_reg_pkg.sv
- src/snitch_cluster_peripheral/snitch_cluster_peripheral_reg_top.sv
- src/snitch_cluster_peripheral/snitch_cluster_peripheral.sv
- src/snitch_fpu.sv
- src/snitch_sequencer.sv
- src/snitch_tcdm_interconnect.sv
# Level 1:
- src/snitch_barrier.sv
- src/snitch_fp_ss.sv
- src/snitch_shared_muldiv.sv
- src/virtual_stdout_demux.sv
# Level 2:
- src/snitch_cc.sv
- src/snitch_clkdiv2.sv
# Level 3:
- src/snitch_hive.sv
# Level 4:
- src/snitch_cluster.sv
- target: test
files:
# Level 0
- test/snitch_tcdm_interconnect_tb.sv
|
datamover:
vlog_opts: [
+nowarnSVCHK,
-suppress 2275,
-L hwpe_stream_lib,
-L hwpe_ctrl_lib,
-L hci_lib,
]
files: [
rtl/datamover_package.sv,
rtl/datamover_streamer.sv,
rtl/datamover_engine.sv,
rtl/datamover_top.sv,
]
|
---
# Wait a few clk cycles for core to go idle
- {opcode: wait, addr: 0, data: 0}
- {opcode: wait, addr: 0, data: 0}
# Enable all interrupts
- {opcode: write, addr: 0x00000004, data: 0x00000007}
# Enable kmac mode, cSHAKE, 128-bit kstrength, no endian swap
- {opcode: write, addr: 0x00000010, data: 0x00000131}
# Write key-share 0 (128 bit)
- {opcode: write, addr: 0x0000001C, data: 0xDEADBEEF}
- {opcode: write, addr: 0x00000020, data: 0xDEADBEEF}
- {opcode: write, addr: 0x00000024, data: 0xDEADBEEF}
- {opcode: write, addr: 0x00000028, data: 0xDEADBEEF}
# Write key-share 1 (128 bit)
- {opcode: write, addr: 0x0000005C, data: 0xFFFFFFFF}
- {opcode: write, addr: 0x00000060, data: 0xFFFFFFFF}
- {opcode: write, addr: 0x00000064, data: 0xFFFFFFFF}
- {opcode: write, addr: 0x00000068, data: 0xFFFFFFFF}
# Write prefix
# TODO(ttrippel): add prefix to seeds
# Issue START command (to start sending in message)
- {opcode: write, addr: 0x00000014, data: 0x00000001}
# Write message
- {opcode: write, addr: 0x00000800, data: 0xDEADBEEF}
- {opcode: write, addr: 0x00000804, data: 0xCA231241}
- {opcode: write, addr: 0x00000808, data: 0x01010101}
- {opcode: write, addr: 0x0000080C, data: 0xFEEDFEED}
- {opcode: write, addr: 0x00000810, data: 0xFAC23423}
- {opcode: write, addr: 0x00000814, data: 0x66666666}
- {opcode: write, addr: 0x00000818, data: 0x11001100}
- {opcode: write, addr: 0x0000081C, data: 0x88253235}
# Write right_encode(output_length)
- {opcode: write, addr: 0x00000800, data: 0x00020100}
# Issue PROCESS command (to start hashing)
- {opcode: write, addr: 0x00000014, data: 0x00000002}
# Wait for the hash to complete
- {opcode: wait, addr: 0x00000000, data: 0x00000000, repeat: 80}
# Clear interrupts
- {opcode: write, addr: 0x00000000, data: 0xFFFFFFFF}
# Read some of the output
- {opcode: read, addr: 0x00000400, data: 0x0}
- {opcode: read, addr: 0x00000404, data: 0x0}
- {opcode: read, addr: 0x00000408, data: 0x0}
- {opcode: read, addr: 0x0000040C, data: 0x0}
- {opcode: read, addr: 0x00000410, data: 0x0}
- {opcode: read, addr: 0x00000414, data: 0x0}
# Issue DONE command
- {opcode: write, addr: 0x00000014, data: 0x00000008}
- {opcode: wait, addr: 0, data: 0}
- {opcode: wait, addr: 0, data: 0}
...
|
# file: third_party/tests/ariane/src/fpu/src/fpu_div_sqrt_mvp/Bender.yml
package:
name: fpu_div_sqrt_mvp
dependencies:
common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.13.1 }
sources:
# Level 0
- hdl/defs_div_sqrt_mvp.sv
- hdl/iteration_div_sqrt_mvp.sv
# Level 1
- hdl/control_mvp.sv
- hdl/norm_div_sqrt_mvp.sv
- hdl/preprocess_mvp.sv
# Level 2
- hdl/nrbd_nrsc_mvp.sv
# Level 3
- hdl/div_sqrt_top_mvp.sv
# Level 4
- hdl/div_sqrt_mvp_wrapper.sv
|
language: scala
scala: 2.11.7
notifications:
slack:
rooms:
- plasticine-arch:kRp0KfrygHiq2wCMrcgMogBW#regression
email:
recipients: <EMAIL>
on_success: change # default: change
on_failure: always # default: always
# branches:
# only:
# - fpga
# - master
install:
- wget https://www.veripool.org/ftp/verilator-3.900.tgz
- unset VERILATOR_ROOT # For bash
- tar xvzf verilator*.t*gz
- cd verilator*
- ./configure
- make
- sudo make install
before_script:
- sudo chmod +x /usr/local/bin/sbt # Travis is being so annoying (https://github.com/travis-ci/travis-ci/issues/7703)
- cd ${NEW_TEMPLATES_HOME}
script:
- free -k
- bash ./run-templates.sh all
- sbt "test:runMain templates.Launcher all"
- cd /home/travis/build/stanford-ppl/spatial/
- sbt compile
|
name: RTL-CI
on:
push:
branches: [main]
pull_request:
branches: [main]
schedule: [cron: "0 */24 * * *"]
jobs:
build-and-simulate:
name: Build and Simulate
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: olafurpg/setup-scala@v10
with:
java-version: [email protected]
- name: Check out Spinal-CocotbLib code
uses: actions/checkout@v2
with:
repository: SpinalHDL/CocotbLib
path: ./spinal-rtl/src/test/python/cocotblib
- name: Run
env:
SHOW_WAVE: false
run: |
sudo apt-get update
cd verilog-rtl
sudo apt-get install -y iverilog yosys
# sudo apt-get install -y gtkwave
sudo apt-get install expect
# Install TLV code generator
pip3 install sandpiper-saas
# Confirm sandpiper agreement
./src/riscv_pll/sandpiper-confirm.sh
./run.sh
cd ../spinal-rtl
sudo apt-get install -y verilator
# sudo apt-get install -y lcov
# sudo apt-get install python3-pip
# Local install Cocotb and set PATH env
pip3 install cocotb
export PATH="$HOME/.local/bin:$PATH"
#./run.sh
- name: Setup tmate session
if: ${{ failure() }}
uses: mxschmitt/action-tmate@v3
|
language: scala
scala:
- 2.11
sudo: false
cache:
directories:
$HOME/.ivy2
$INSTALL_DIR
git:
depth: 10
sbt_args: -Dsbt.log.noformat=true
env:
global:
INSTALL_DIR=$TRAVIS_BUILD_DIR/install
VERILATOR_ROOT=$INSTALL_DIR
PATH=$PATH:$VERILATOR_ROOT/bin:$TRAVIS_BUILD_DIR/utils/bin
install:
# Grab Chisel 3
- git clone https://github.com/ucb-bar/chisel3.git
# Install Verilator (if not found in cache)
- bash .install_verilator.sh
script:
# FIRRTL Tests
- verilator --version
- cd $TRAVIS_BUILD_DIR
- sbt clean test assembly publish-local
- mkdir -p chisel3/lib
- cp utils/bin/firrtl.jar chisel3/lib
# Chisel 3 Tests
- cd chisel3
- sbt clean test
|
name: fw-ci
on:
push:
branches:
- develop
jobs:
fw-release:
runs-on: ubuntu-latest
steps:
- name: Git Checkout
uses: actions/checkout@v2
- name: Build fw/all
run: |
cmake -GNinja -Bbuild
cmake --build build --target fw/all
container: nupole/hdl-tools:develop
|
---
- hosts: packet-generator
gather_facts: False
tasks:
- name: stop carbon metrics collection
command: "killall pktgen_carbon.bash"
ignore_errors: True
tags:
- generator
|
version: '3.3'
services:
controller:
image: ${CONTROL_TAG}
volumes:
- ./configs:/configs
- ./logs:/ping_controller/logs
user: "${UID}:${GUID}"
network_mode: "host"
stdin_open: true # -i flag
tty: true # -t flag
server:
image: ${SERVER_TAG}
user: "${UID}:${GUID}"
depends_on:
- controller
volumes:
- ./configs:/configs
- ./logs:/server/logs
command: --radioaddr localhost -r 0.0.0.0 --dispatch --config /configs -vvvv --livegame ${EXTRA_ARGS}
network_mode: "host"
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: refinedet pedestrian detector.
input size: 360*480
float ops: 123G
task: detection
framework: caffe
prune: 'no'
version: 2.0
files:
- name: cf_refinedet_coco_360_480_123G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=cf_refinedet_coco_360_480_123G_2.0.zip
checksum: 50df28358ebaf2db460f2d45a33e1ce6
- name: refinedet_baseline
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_baseline-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: 1226f5e1d47e3d81023afbf069dbd4a5
- name: refinedet_baseline
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_baseline-vck190-r2.0.0.tar.gz
checksum: 476455b991e4bf9f715353b6b9a3e601
- name: refinedet_baseline
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_baseline-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: 464611e79b3825bc58b0621451699353
- name: refinedet_baseline
type: xmodel
board: vck50008pe-DPUCVDX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_baseline-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz
checksum: 81e847580461231676b1eda7d341982e
- name: refinedet_baseline
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_baseline-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: c8b617d73ffd06690787ec7c763e48b8
- name: refinedet_baseline
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=refinedet_baseline-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: 67a2ed85223c76bb699da14bac5d5883
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
# repo: RasooulFaraji/Simple-10GbE-RUDP-KCU105-Example
# ----------------------------------------------------------------------------
# This file is part of the `Simple-10GbE-RUDP-KCU105-Example`. It is subject to
# the license terms in the LICENSE.txt file found in the top-level directory
# of this distribution and at:
# https://confluence.slac.stanford.edu/display/ppareg/LICENSE.html.
# No part of the `Simple-10GbE-RUDP-KCU105-Example`, including this file, may be
# copied, modified, propagated, or distributed except according to the terms
# contained in the LICENSE.txt file.
# ----------------------------------------------------------------------------
# The following environment variables are required for this process:
# secrets.GH_TOKEN
name: Simple-10GbE-RUDP-KCU105-Example Integration
on: [push]
jobs:
test_and_document:
name: Test And Generate Documentation
runs-on: ubuntu-20.04
steps:
# This step checks out a copy of your repository.
- uses: actions/checkout@v2
with:
fetch-depth: 0
- uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install doxygen doxygen-doc doxygen-latex doxygen-gui graphviz
python -m pip install --upgrade pip
pip install setuptools
pip install -r pip_requirements.txt
- name: Generate Documentation
run: |
cd docs
make html
- name: Deploy Documentation
if: startsWith(github.ref, 'refs/tags/')
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GH_TOKEN }}
publish_dir: docs/build/html
|
%YAML 1.2
---
Pedestrian:
pedestrian_haar_like_cascade_file_path: "/data/ConstraintGenerator/PedestrianDetector/HaarLike/pedestrian_cascade.xml"
y_of_top_of_detection_area: 40 # y lower bound of the image region used for pedestrian detection
y_of_bottom_of_detection_area: 220 # y upper bound of the image region used for pedestrian detection
x_of_left_of_detection_area: 200 # x lower bound of the image region used for pedestrian detection
x_of_right_of_detection_area: 440 # x upper bound of the image region used for pedestrian detection
haar_like_scale_factor: 1.1
haar_like_min_neighbors: 5
gaussian_kernel_size: 5
nof_closing: 1
hsv_trans_min_h: 0
hsv_trans_min_s: 30
hsv_trans_min_v: 60
hsv_trans_max_h: 20
hsv_trans_max_s: 150
hsv_trans_max_v: 255
ratio_of_labeled_region_lower: 0.60 # labeled-region area / bounding-rectangle area
ratio_of_labeled_region_upper: 0.85 # labeled-region area / bounding-rectangle area
aspect_ratio_lower: 0.20 # width / height
aspect_ratio_upper: 0.40 # width / height
# NOTE: when multiple regions match, the one closest to (lower + upper) / 2 is adopted
|
# file: rtl/tb/src_files.yml
tb:
targets: [
rtl
]
files: [
riscv_pkg.sv,
jtag_pkg.sv,
pulp_tap_pkg.sv,
tb_clk_gen.sv,
tb_fs_handler.sv,
dpi_models/dpi_models.sv,
tb_driver/tb_driver.sv,
tb_pulp.sv,
SimJTAG.sv,
SimDTM.sv,
]
vlog_opts : [
-L riscv_dbg_lib,
]
|
# repo: StanfordVLSI/dragonphy2
# Adapted from Garnet and ButterPHY
name: mdll_r1
commands:
- |
mkdir -p outputs
tar -xzvf /home/sjkim85/dragonphy_tarballs/mdll_r1_top-latest.tar.gz -C outputs
mv outputs/*/* outputs/
python comment_fill_bound_tap.py outputs/mdll_r1_top.lvs.v
mv outputs/mdll_r1_top_alt.lvs.v outputs/mdll_r1_top.lvs.v
outputs:
- mdll_r1_top.lef
- mdll_r1_top.gds
- mdll_r1_top_macro.cdl
- mdll_r1_top.lvs.v
- mdll_r1_top.version
|
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
name: clint
description: RISC-V Core-local Interrupt Controller
authors: [<NAME> <<EMAIL>>]
dependencies:
register_interface: {path: ../../vendor/pulp_platform_register_interface}
sources:
# Level 0:
- src/clint_reg_pkg.sv
# Level 1:
- src/clint_reg_top.sv
# Level 2:
- src/clint.sv
- target: test
files:
- test/clint_tb.sv
|
- Global:
Print : true
- Library:
Name : pipework
Format : "add_vhdl_file sources_1 #{library_name} #{file_name}"
PathList : ["../../qconv-strip-vhdl/PipeWork/src/"]
Use : ["SDPRAM(XILINX_ULTRASCALE_AUTO_SELECT)"]
- Library:
Name : qconv
Format : "add_vhdl_file sources_1 #{library_name} #{file_name}"
PathList : ["../../qconv-strip-vhdl/src/main/vhdl/"]
Top : ["QCONV_STRIP_AXI3"]
|
# Copyright 2019 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
fpnew:
incdirs: [
../common_cells/include,
]
files: [
src/fpnew_pkg.sv,
src/fpnew_cast_multi.sv,
src/fpnew_classifier.sv,
src/fpnew_divsqrt_multi.sv,
src/fpnew_fma.sv,
src/fpnew_fma_multi.sv,
src/fpnew_noncomp.sv,
src/fpnew_opgroup_block.sv,
src/fpnew_opgroup_fmt_slice.sv,
src/fpnew_opgroup_multifmt_slice.sv,
src/fpnew_rounding.sv,
src/fpnew_top.sv,
]
|
# repo: Datum-Technology-Corporation/core-v-mcu
core-v-mcu-soc:
incdirs: [
../includes,
include,
../vendor/pulp_platform_axi/include
]
files: [
soc/pkg_soc_interconnect.sv,
soc/axi64_2_lint32_wrap.sv,
soc/lint_2_axi_wrap.sv,
soc/contiguous_crossbar.sv,
soc/interleaved_crossbar.sv,
soc/tcdm_demux.sv,
soc/boot_rom.sv,
soc/l2_ram_multi_bank.sv,
soc/lint_jtag_wrap.sv,
soc/periph_bus_wrap.sv,
soc/soc_clk_rst_gen.sv,
soc/soc_event_arbiter.sv,
soc/soc_event_generator.sv,
soc/soc_event_queue.sv,
soc/tcdm_error_slave.sv,
soc/soc_interconnect.sv,
soc/soc_interconnect_wrap.sv,
soc/soc_peripherals.sv,
]
vlog_opts : [
-L riscv_dbg_lib,
-L axi_lib
]
udma_subsystem:
incdirs: [
../includes,
.,
]
files: [
udma_subsystem/udma_subsystem.sv,
]
fc:
incdirs: [
../includes,
.,
]
files: [
fc/fc_demux.sv,
fc/fc_subsystem.sv,
fc/fc_hwpe.sv,
fc/cv32e40p_fp_wrapper.sv,
]
components:
incdirs: [
../includes,
]
files: [
components/apb_soc_ctrl.sv,
components/pulp_interfaces.sv,
]
components_behav:
incdirs: [
../includes,
]
targets: [
rtl
]
files: [
components/freq_meter.sv,
]
flags: [
skip_synthesis,
]
efpga_subsystem:
incdirs: [
../includes,
.,
]
files: [
efpga_subsystem/efpga_subsystem.sv,
efpga_subsystem/A2_fifo.sv,
efpga_subsystem/A2_fifo_ctl.sv,
efpga_subsystem/A2_fifo_ram.sv,
efpga_subsystem/tcdm_interface.sv,
]
top:
incdirs: [
../includes,
]
files: [
top/pad_control.sv,
top/pad_frame.sv,
top/safe_domain.sv,
top/soc_domain.sv,
top/core_v_mcu.sv,
]
vlog_opts: [
-L riscv_dbg_lib
]
pulpissimo_padframe:
flags: [
skip_synthesis,
]
incdirs: [
../includes,
]
files: [
top/pad_frame.sv,
]
|
"at200":
arch: "artix7_200t"
device_family: "xc7a200t"
device_name: "fig1"
device_speed: "ffg1156-1"
device: "xc7a200t-fig1-roi-virt"
board: "nexys_video"
timeout: 200
|
# repo: DeepFlyingSky/circt
name: "Request review from code owner"
on:
pull_request_target:
branches: [ main ]
jobs:
# Automatically request reviews from the code owner identified in a set of
# JSON files in codeowners/.
request_reviewer:
name: "Request review from code owner"
runs-on: ubuntu-18.04
steps:
- name: Get CIRCT
uses: actions/checkout@v2
with:
submodules: 'false'
- name: apply-herald-rules
id: herald
uses: gagoar/use-herald-action@master
continue-on-error: true
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
rulesLocation: codeowners/*.json
DEBUG: '*'
# Output the decisions
- name: Store response payload to file
run: echo '${{ steps.herald.outputs.appliedRules }}'
|
before_script:
- echo start testing $CI_PROJECT_NAME.
build_on_host:
tags:
- aisw
script:
- source /opt/rh/devtoolset-9/enable
- ./cmake.sh --clean
build_for_microblaze:
tags:
- aisw
script:
- source /var/lib/docker/scratch/gitlab-runner/build/mb_sdk/environment-setup-microblazeel-v11.0-bs-cmp-re-mh-div-xilinx-linux
- ./cmake.sh --clean
build_for_petalinux:
tags:
- aisw
script:
- source /var/lib/docker/scratch/gitlab-runner/build/sdk-0618/environment-setup-aarch64-xilinx-linux
- ./cmake.sh --clean
|
---
name: led-blinker
board: boards/zedboard
version: 0.1.1
cores:
- fpga/cores/axi_ctl_register_v1_0
- fpga/cores/axi_sts_register_v1_0
- fpga/cores/dna_reader_v1_0
memory:
- name: control
offset: '0x60000000'
range: 4K
- name: status
offset: '0x50000000'
range: 4K
control_registers:
- led
status_registers:
- forty_two
parameters:
fclk0: 50000000 # FPGA clock speed in Hz
xdc:
- ./constraints.xdc
drivers:
- server/drivers/common.hpp
- ./led_blinker.hpp
web:
- ./web/index.html
- web/koheron.ts
- web/led-blinker.ts
- ./web/app.ts
- ./web/control.ts
- web/main.css
|
steps:
- label: "test"
command: |
# set up environment
source /cad/modules/tcl/init/bash
module load base xcelium/19.03.003
export ANASYMOD_TARGET=sim_xcelium
printenv
# set up Python environment
/usr/local/miniconda/bin/python3.7 -m venv venv
source venv/bin/activate
# run regression script
source regress.sh
timeout_in_minutes: 60
agents:
fault2: "true"
- label: "test_emu"
command: |
# set up environment
source /etc/environment
export FPGA_SERVER=1
export ANASYMOD_TARGET=sim_vivado
printenv
# set up Python environment
python3.7 -m venv venv
source venv/bin/activate
# update the board name to match what is
# available on the regression server
sed -i 's/PYNQ_Z1/ZC702/g' unittests/*/prj.yaml
# run regression script
source regress.sh
artifact_paths:
- "unittests/*/build/*/prj/prj.runs/*/*.bit"
- "unittests/*/build/*/prj/prj.runs/*/*.ltx"
- "unittests/*/build/*/prj/prj.runs/*/*.log"
- "unittests/*/build/*/prj/prj.sim/*/behav/xsim/*.log"
- "unittests/*/build/*/prj/prj.sim/*/behav/xsim/*.sh"
- "unittests/*/build/*/prj/prj.runs/*/*.xsa"
- "unittests/*/build/*/prj/prj.runs/*/ps7_init.tcl"
- "unittests/*/build/*/prj/prj.sdk/*/*/*.elf"
- "unittests/*/build/*/*.tcl"
- "unittests/*/prj.y*ml"
timeout_in_minutes: 60
agents:
fpga_verif: "true"
|
---
version: 2.1
jobs:
build-and-test:
machine:
image: ubuntu-2004:202101-01
resource_class: arm.medium
environment:
RELEASE: https://github.com/modula3/cm3/releases/download/d5.11.4/cm3-boot-AMD64_LINUX-d5.11.4.tar.xz
CM3_TARGET: ARM64_LINUX
steps:
- run:
name: Install prerequisites
command: |
sudo apt-get update --quiet
sudo apt-get install --quiet --assume-yes cmake libglu1-mesa-dev ninja-build xorg-dev
- run:
name: Install bootstrap release
command: |
mkdir "${HOME}/bootstrap" "${HOME}/build"
curl --location --silent "${RELEASE}" | tar Jxf - --directory="${HOME}/bootstrap" --strip-components=1
cmake -S "${HOME}/bootstrap" -B "${HOME}/build" -G Ninja -DCMAKE_INSTALL_PREFIX="${HOME}/install"
cmake --build "${HOME}/build"
cmake --install "${HOME}/build"
- checkout
- run:
name: Build all the things
command: |
PATH="${HOME}/install/bin:${PATH}" scripts/concierge.py full-upgrade all
- run:
name: Run compiler validation suite
command: |
PATH="${HOME}/install/bin:${PATH}" cm3 -DHTML
working_directory: m3-sys/m3tests
- store_test_results:
path: m3-sys/m3tests/m3tests-results.xml
workflows:
ARM64_LINUX:
jobs:
- build-and-test
|
# repo: mfkiwl/Bedrock
# Defs are made available as Verilog defines
# Params are made available as Verilog localparams and included in ROM metadata
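# For illustration, a hedged sketch of what the generated Verilog might look
# like for marble_mini (names and quoting are assumptions, not taken from the
# actual Bedrock build flow):
#   `define MARBLE_MINI 1                  // from defs
#   localparam CARRIER_REV = "v1";         // from params, also kept in ROM metadata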
marble_mini:
defs:
MARBLE_MINI: 1
USE_I2CBRIDGE: 1
MMC_CTRACE: 1
USE_GTPCLK: 1
params:
carrier: "Marble Mini"
carrier_rev: "v1"
default_enable_rx: 1
misc_config_default: 4
marble_v2:
defs:
MARBLE_V2: 1
USE_I2CBRIDGE: 1
MMC_CTRACE: 1
USE_GTPCLK: 1
params:
carrier: "Marble"
carrier_rev: "v2"
default_enable_rx: 1
misc_config_default: 4
|
metadata:
name: axil_addrdec
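# Assumption: the '-' characters in the addresses below act as don't-care hex
# digits, so each master port M_xx would decode a 64 KiB (0x10000-byte) window.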
fields:
- address: 0x0----
name: M_00
behavior: axi
flatten: record
- address: 0x1----
name: M_01
behavior: axi
flatten: record
- address: 0x2----
name: M_02
behavior: axi
flatten: record
- address: 0x3----
name: M_03
behavior: axi
flatten: record
- address: 0x4----
name: M_04
behavior: axi
flatten: record
- address: 0x5----
name: M_05
behavior: axi
flatten: record
- address: 0x6----
name: M_06
behavior: axi
flatten: record
- address: 0x7----
name: M_07
behavior: axi
flatten: record
- address: 0x8----
name: M_08
behavior: axi
flatten: record
- address: 0x9----
name: M_09
behavior: axi
flatten: record
- address: 0xa----
name: M_10
behavior: axi
flatten: record
- address: 0xb----
name: M_11
behavior: axi
flatten: record
- address: 0xc----
name: M_12
behavior: axi
flatten: record
- address: 0xd----
name: M_13
behavior: axi
flatten: record
- address: 0xe----
name: M_14
behavior: axi
flatten: record
- address: 0xf----
name: M_15
behavior: axi
flatten: record
|
# repo: justin371/rules_verilog
name: Run Bazel Tests
on: [push]
jobs:
bazel-tests:
name: Bazel Tests
runs-on: ubuntu-latest
steps:
- name: checkout
uses: actions/checkout@main
- name: install-bazelisk
uses: vsco/bazelisk-action@master
with:
version: '1.7.5'
bazel-install-path: './.local/bin'
os: 'linux'
- name: run-buildifier-diff
run: bazel run //tests:buildifier_format_diff
|
# repo: letrend/neopixel_fpga
sudo: required
dist: trusty
language: generic
notifications:
email:
on_success: always
on_failure: always
env:
- ROS_DISTRO=indigo
before_script:
- export CI_SOURCE_PATH=$(pwd)
- export REPOSITORY_NAME=${PWD##*/}
- if [ ! "$ROS_PARALLEL_JOBS" ]; then export ROS_PARALLEL_JOBS="-j8 -l8"; fi
- echo "Testing branch $TRAVIS_BRANCH of $REPOSITORY_NAME"
- sudo sh -c 'echo "deb http://packages.ros.org/ros-shadow-fixed/ubuntu `lsb_release -cs` main" > /etc/apt/sources.list.d/ros-latest.list'
- wget http://packages.ros.org/ros.key -O - | sudo apt-key add -
- sudo add-apt-repository -y ppa:openrave/release
- sudo apt-get update -qq
- sudo apt-get install -qq -y python-catkin-pkg python-rosdep python-wstool python-catkin-tools ros-$ROS_DISTRO-catkin
- sudo rosdep init
- rosdep update; while [ $? != 0 ]; do sleep 1; rosdep update; done
- mkdir -p ~/ros/ws_$REPOSITORY_NAME/src
- cd ~/ros/ws_$REPOSITORY_NAME/src
- ln -s $CI_SOURCE_PATH . # Link the repo we are testing to the new workspace
- git clone https://github.com/ros/std_msgs std_msgs
- git clone https://github.com/ros/ros_comm_msgs ros_comm_msgs -b ${ROS_DISTRO}-devel
- git clone https://github.com/ros/common_msgs common_msgs -b ${ROS_DISTRO}-devel
- git clone https://github.com/ros/actionlib actionlib -b ${ROS_DISTRO}-devel
- find actionlib -name CMakeLists.txt -exec sed -i 's@add_subdirectory(test)@#add_subdirectory(test)@' {} \;
- cd ..
- rosdep install -q -r -n --from-paths src --ignore-src --rosdistro $ROS_DISTRO -y
script:
- source /opt/ros/$ROS_DISTRO/setup.bash
- catkin_make $ROS_PARALLEL_JOBS
# test for roseus
- sudo apt-get install -qq -y ros-${ROS_DISTRO}-roseus
- sudo dpkg -r --force-depends ros-${ROS_DISTRO}-geneus
# need to use latest test codes
- rm -fr devel build src/*
- cd src
- git clone http://github.com/jsk-ros-pkg/jsk_roseus /tmp/jsk_roseus
- (cd /tmp/jsk_roseus; git checkout `git describe --abbrev=0 --tags`)
- cp -r /tmp/jsk_roseus/roseus ./
#- sudo wget https://raw.githubusercontent.com/k-okada/jsk_roseus/fix_generate_all/roseus/scripts/generate-all-msg-srv.sh -O /opt/ros/hydro/share/roseus/scripts/generate-all-msg-srv.sh
- ln -s $CI_SOURCE_PATH . # Link the repo we are testing to the new workspace
- cd ..
# check roseus
- source /opt/ros/$ROS_DISTRO/setup.bash
- rm -fr build devel # need to clean up to check #42 case
- catkin build
- source devel/setup.bash
- cat devel/share/roseus/ros/roseus/manifest.l
- grep -c -e "/opt/ros/${ROS_DISTRO}/share/roseus/package.xml" devel/share/roseus/ros/roseus/manifest.l; [ "$?" == 1 ]; # check https://github.com/jsk-ros-pkg/geneus/pull/42
- rostest roseus test-genmsg.catkin.test
- rostest roseus test-genmsg-oneworkspace.catkin.launch
# check pr2eus
- git clone http://github.com/jsk-ros-pkg/jsk_pr2eus src/jsk_pr2eus
- (cd src/jsk_pr2eus; git checkout `git describe --abbrev=0 --tags`)
- sudo apt-get -qq -f install
- rosdep install -q -r -n --from-paths src --ignore-src --rosdistro $ROS_DISTRO -y
- sudo dpkg -r --force-depends ros-hydro-geneus
- catkin clean -a
- source /opt/ros/$ROS_DISTRO/setup.bash
- catkin build
- source devel/setup.bash
- rostest pr2eus pr2-ri-test-simple.launch
- rostest pr2eus default-ri-test.test
- rostest pr2eus pr2eus-test.launch
after_failure:
- rostest -t roseus test-genmsg.catkin.test
- rostest -t roseus test-genmsg-oneworkspace.catkin.launch
|
# file: .github/workflows/riscv-arch-test.yml
# Run the RISC-V riscv-arch-test test framework port to check current NEORV32 version
name: 'riscv-arch-test'
on:
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
suite:
- I
- C
- M
- privilege
- Zifencei
name: 'RISC-V Compatibility Check'
steps:
- name: '🧰 Repository Checkout'
uses: actions/checkout@v2
- name: '⚙️ Setup Software Framework'
uses: docker://ghcr.io/stnolting/neorv32/sim
with:
args: ./do.py BuildAndInstallSoftwareFrameworkTests
- name: '🚧 Run RISC-V Architecture Tests'
uses: docker://ghcr.io/stnolting/neorv32/sim
with:
args: ./do.py RunRISCVArchitectureTests -s ${{ matrix.suite }}
|
# repo: Calculasians/HDC-Sensor-Fusion-Research
power.inputs.waveforms_meta: "append"
power.inputs.waveforms:
- "/tools/B/daniels/hammer-tsmc28/build/sim-par-rundir/svm.vcd"
power.inputs.database: "/tools/B/daniels/hammer-tsmc28/build/par-rundir/latest"
power.inputs.tb_name: "SVM_tb"
power.inputs.saifs_meta: "append"
power.inputs.saifs:
- "/tools/B/daniels/hammer-tsmc28/build/sim-par-rundir/svm.saif"
power.inputs.start_times: ["0"]
power.inputs.end_times: ["57320"]
|
---
name: mars_zx3_template
board: boards/mars_zx3
version: 0.1.1
cores:
- fpga/cores/axi_ctl_register_v1_0
- fpga/cores/axi_sts_register_v1_0
- fpga/cores/dna_reader_v1_0
memory:
- name: control
offset: '0x60000000'
range: 4K
- name: status
offset: '0x50000000'
range: 4K
control_registers:
- led
status_registers:
- forty_two
- dna_high
- dna_low
parameters:
fclk0: 100000000 # FPGA clock speed in Hz
xdc:
- ./../../../boards/mars_zx3/config/ports.xdc
- ./constraints.xdc
drivers:
- server/drivers/common.hpp
- ./led_blinker.hpp
web:
- ./web/index.html
- web/koheron.ts
- web/led-blinker.ts
- ./web/app.ts
- web/main.css
|
# repo: thirono/basil
# Device description for the Agilent Technologies E3644A Power Supply.
# set_ functions expect a parameter, get_ functions return a parameter.
# Just the very basic commands are implemented here.
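# Usage sketch (hypothetical; assumes basil's SCPI transfer layer turns the
# entries below into auto-generated methods on the device object):
#   dut['PowerSupply'].set_enable(1)          # sends "OUTP:STAT 1"
#   volts = dut['PowerSupply'].get_voltage()  # sends "MEAS:VOLT:DC?"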
identifier: Agilent Technologies,E3644A
get_voltage: MEAS:VOLT:DC?
get_current: MEAS:CURR:DC?
set_enable: OUTP:STAT
get_enable: OUTP:STAT?
|
identifier: rudolv
name: RudolV RISC-V Soft-CPU
type: mcu
arch: riscv32
toolchain:
- zephyr
testing:
ignore_tags:
- net
- bluetooth
|
hwpe-stream:
vlog_opts: [
+nowarnSVCHK,
]
incdirs: [
rtl,
]
files: [
rtl/hwpe_stream_package.sv,
rtl/hwpe_stream_interfaces.sv,
rtl/hwpe_stream_addressgen.sv,
rtl/hwpe_stream_fifo_earlystall_sidech.sv,
rtl/hwpe_stream_fifo_earlystall.sv,
rtl/hwpe_stream_fifo_scm.sv,
rtl/hwpe_stream_fifo_sidech.sv,
rtl/hwpe_stream_fifo.sv,
rtl/hwpe_stream_buffer.sv,
rtl/hwpe_stream_merge.sv,
rtl/hwpe_stream_split.sv,
rtl/hwpe_stream_sink.sv,
rtl/hwpe_stream_source.sv,
rtl/hwpe_stream_sink_realign.sv,
rtl/hwpe_stream_source_realign.sv,
rtl/hwpe_stream_mux_static.sv,
rtl/hwpe_stream_demux_static.sv,
rtl/hwpe_stream_tcdm_fifo_load.sv,
rtl/hwpe_stream_tcdm_fifo_store.sv,
rtl/hwpe_stream_tcdm_mux.sv,
rtl/hwpe_stream_tcdm_mux_static.sv,
rtl/hwpe_stream_tcdm_reorder.sv,
rtl/hwpe_stream_tcdm_reorder_static.sv,
]
tb_hwpe_stream:
flags: [
skip_synthesis,
]
files: [
tb/tb_hwpe_stream_reservoir.sv,
tb/tb_hwpe_stream_receiver.sv,
]
tb_hwpe_stream_local:
flags: [
only_local,
skip_synthesis,
]
files: [
tb/tb_hwpe_stream_sink_realign.sv,
tb/tb_hwpe_stream_source_realign.sv,
]
|
name: Coverage
# Skip this step for now since the upload action seems to be broken:
# https://github.com/codecov/codecov-action/issues/37
on:
push:
branches-ignore:
- '**'
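# ('**' matches every branch, so pushes never trigger this workflow while the
# upload action is broken)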
jobs:
test:
runs-on: ubuntu-latest
container:
image: xd009642/tarpaulin
options: --security-opt seccomp=unconfined
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Run tests
run: cargo tarpaulin -v --out Xml
- name: Upload coverage
uses: codecov/codecov-action@v1
with:
token: ${{secrets.CODECOV_TOKEN}}
fail_ci_if_error: true
|
# repo: NicoBaumann454/alsp.net.api
### YamlMime:ManagedReference
items:
- uid: alps_.net_api.SimplePASSElement
commentId: T:alps_.net_api.SimplePASSElement
id: SimplePASSElement
parent: alps_.net_api
children: []
langs:
- csharp
- vb
name: SimplePASSElement
nameWithType: SimplePASSElement
fullName: alps_.net_api.SimplePASSElement
type: Interface
source:
remote:
path: alps .net api/alps .net api/OwlGraph.cs
branch: master
repo: http://imi-dev.imi.kit.edu:443/ukerz/alps-.net-api.git
id: SimplePASSElement
path: OwlGraph.cs
startLine: 4318
assemblies:
- alps.net_api
namespace: alps_.net_api
summary: "\nThis is the description of the simple elements, although they are not strictly\nnecessary as such; we will see whether they remain in the library, probably they will\n"
example: []
syntax:
content: 'public interface SimplePASSElement : IOwlThing'
content.vb: >-
Public Interface SimplePASSElement
Inherits IOwlThing
modifiers.csharp:
- public
- interface
modifiers.vb:
- Public
- Interface
references:
- uid: alps_.net_api
commentId: N:alps_.net_api
name: alps_.net_api
nameWithType: alps_.net_api
fullName: alps_.net_api
|
# Builds glb_tile but not from scratch, uses pre-cached gold collateral
agents: { jobsize: "hours" }
##############################################################################
# Use this to test a specific branch/commit:
# Add to env:
# NOV11: ee214ef77b827f969e4b5f056f5d866cf391be7a
# Add to commands:
# - pwd; git branch; git checkout $$NOV11
##############################################################################
# Note: "echo exit 13" prevents hang at genus/innovus prompt
env:
TEST: 'echo exit 13 | mflowgen/test/test_module.sh'
GOLD: /sim/buildkite-agent/gold
steps:
##############################################################################
# COMPLETE GLB-TILE BUILD (ish) (uses cached rtl step)
#
- label: 'glb tile 2h'
commands:
# (Optional) check out branch version
# - pwd; git branch; git checkout $$NOV11
# Copy existing rtl collateral
- 'set -x; mkdir -p full_chip; cp -rp $$GOLD/full_chip/*-rtl full_chip/'
# Link to existing rtl result
- set -x; mkdir -p full_chip/14-glb_top/inputs
- set -x; pushd full_chip/*-glb_top/inputs; ln -s ../../*[0-9]-rtl/outputs/design.v; popd
# (Re?)run RTL, then do glb tile
- $TEST --need_space 3G full_chip glb_top --steps rtl --debug
- $TEST --need_space 3G full_chip glb_top --steps glb_tile --debug
- wait: { continue_on_failure: true } # One step at a time + continue on failure
|
package:
name: fwperiph-dma
version: None
deps:
- name: fwprotocol-defs
url: http://github.com/Featherweight-IP/fwprotocol-defs.git
type: raw
dev-deps:
- name: fwprotocol-defs
url: http://github.com/Featherweight-IP/fwprotocol-defs.git
type: raw
- name: vlsim
type: python
src: pypi
- name: pyvsc
url: https://github.com/fvutils/pyvsc.git
type: python
- name: mkdv
url: https://github.com/fvutils/mkdv.git
type: python
- name: pybfms
url: https://github.com/pybfms/pybfms.git
type: python
- name: pybfms_event
url: https://github.com/pybfms/pybfms_event.git
type: python
- name: pybfms_wishbone
url: https://github.com/pybfms/pybfms_wishbone.git
type: python
- name: pybfms_generic_sram
url: https://github.com/pybfms/pybfms_generic_sram.git
type: python
- name: zephyr-cosim
url: https://github.com/zephyr-cosim/zephyr-cosim.git
- name: zephyr
url: https://github.com/zephyrproject-rtos/zephyr.git
depth: 1
|
---
input_file : 11_merge_sort_core_2.akd
output_file : ../11_merge_sort_core_2.md
image_url :
"Fig.1 マージソートコアの構成" : "image/11_merge_sort_core_2_1.jpg"
"Fig.2 4-way マージソートツリーによる16ワードデータのソート例" : "image/11_merge_sort_core_2_2.jpg"
"Fig.3 最初のパスのDMA転送(ストリーム入力無し)" : "image/11_merge_sort_core_2_3.jpg"
"Fig.4 最初のパスのDMA転送(ストリーム入力あり)" : "image/11_merge_sort_core_2_4.jpg"
"Fig.5 マージソートコアのストリーム入力" : "image/11_merge_sort_core_2_5.jpg"
"Fig.6 マルチワードマージソートの最初のパス(ストリーム入力無しの場合)" : "image/11_merge_sort_core_2_6.jpg"
"Fig.7 マルチワードマージソートの最初のパス(ストリーム入力ありの場合)" : "image/11_merge_sort_core_2_7.jpg"
link_list :
- id : "「はじめに」"
title: "「VHDL で書くマージソーター(はじめに)」"
url : "./01_introduction.md"
- id : "「ワードの定義」"
title: "「VHDL で書くマージソーター(ワードの定義)」"
url : "./02_word_package.md"
- id : "「ワード比較器」"
title: "「VHDL で書くマージソーター(ワード比較器)」"
url : "./03_word_compare.md"
- id : "「ソーティングネットワーク」"
title: "「VHDL で書くマージソーター(ソーティングネットワーク)」"
url : "./04_sorting_network.md"
- id : "「バイトニックマージソート」"
title: "「VHDL で書くマージソーター(バイトニックマージソート)」"
url : "./05_bitonic_sorter.md"
- id : "「バッチャー奇偶マージソート」"
title: "「VHDL で書くマージソーター(バッチャー奇偶マージソート)」"
url : "./06_oddeven_sorter.md"
- id : "「シングルワード マージソート ノード」"
title: "「VHDL で書くマージソーター(シングルワード マージソート ノード)」"
url : "./07_merge_sort_node_single.md"
- id : "「マルチワード マージソート ノード」"
title: "「VHDL で書くマージソーター(マルチワード マージソート ノード)」"
url : "./08_merge_sort_node_multi.md"
- id : "「マージソート ツリー」"
title: "「VHDL で書くマージソーター(マージソート ツリー)」"
url : "./09_merge_sort_tree.md"
- id : "「端数ワード処理」"
title: "「VHDL で書くマージソーター(端数ワード処理)」"
url : "./10_merge_sort_core_1.md"
- id : "「ストリーム入力」"
title: "「VHDL で書くマージソーター(ストリーム入力)」"
url : "./11_merge_sort_core_2.md"
- id : "「ストリームフィードバック」"
title: "「VHDL で書くマージソーター(ストリームフィードバック)」"
url : "./12_merge_sort_core_3.md"
- id : "「ArgSort IP」"
title: "「VHDL で書くマージソーター(ArgSort IP)」"
url : "./13_argsort.md"
- id : "「ArgSort-Ultra96」"
title: "「VHDL で書くマージソーター(ArgSort-Ultra96)」"
url : "https://github.com/ikwzm/ArgSort-Ultra96/blob/1.2.1/doc/ja/argsort-ultra96.md"
- id : "「ArgSort-Kv260」"
title: "「VHDL で書くマージソーター(ArgSort-Kv260)」"
url : "https://github.com/ikwzm/ArgSort-Kv260/blob/1.2.1/doc/ja/argsort-Kv260.md"
- id : "ACRi"
title: "アダプティブコンピューティング研究推進体(ACRi)"
url : "https://www.acri.c.titech.ac.jp/wp"
- id : "アダプティブコンピューティング研究推進体(ACRi)"
title: "アダプティブコンピューティング研究推進体(ACRi)"
url : "https://www.acri.c.titech.ac.jp/wp"
- id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(1)」"
title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(1)」"
url : "https://www.acri.c.titech.ac.jp/wordpress/archives/132"
- id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(2)」"
title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(2)」"
url : "https://www.acri.c.titech.ac.jp/wordpress/archives/501"
- id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(3)」"
title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(3)」"
url : "https://www.acri.c.titech.ac.jp/wordpress/archives/2393"
- id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(4)」"
title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(4)」"
url : "https://www.acri.c.titech.ac.jp/wordpress/archives/3888"
- id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(5)」"
title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(5)」"
url : "https://www.acri.c.titech.ac.jp/wordpress/archives/4713"
---
|
# repo: nicolasruscher/Artemis
version: '2'
services:
artemis-app:
image: artemis
environment:
- _JAVA_OPTIONS=-Xmx512m -Xms256m
- SPRING_PROFILES_ACTIVE=prod,swagger
- MANAGEMENT_METRICS_EXPORT_PROMETHEUS_ENABLED=true
- SPRING_DATASOURCE_URL=jdbc:mysql://artemis-mysql:3306/artemis?useUnicode=true&characterEncoding=utf8&useSSL=false&useLegacyDatetimeCode=false&serverTimezone=UTC&createDatabaseIfNotExist=true
- EUREKA_CLIENT_SERVICE_URL_DEFAULTZONE=http://admin:$${jhipster.registry.password}@jhipster-registry:8761/eureka
- SPRING_CLOUD_CONFIG_URI=http://admin:$${jhipster.registry.password}@jhipster-registry:8761/config
- JHIPSTER_SLEEP=30 # gives time for other services to boot before the application
ports:
- 8080:8080
artemis-mysql:
extends:
file: mysql.yml
service: artemis-mysql
|
description: >-
NSID 2020 final version of failover data and plotting scripts.
Bitstreams are the same as the corresponding e2e experiments
experiment: failover
repositories:
P4Boosters: 5ac827ee
files:
documentation.md: failover_readme.md
raw_data: failover_output_6/
power_data.pickle: analysis/fec_failover_powers.pickle
analysis.ipynb: analysis/failover_analysis.ipynb
|
# repo: SigmaX-ai/bolson
name: Documentation
on:
push:
branches:
- master
pull_request:
paths:
- 'doc/**'
- 'src/**'
- 'Doxyfile'
- 'README.md'
jobs:
book:
name: Book
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install mdbook
run: |
mkdir -p $HOME/mdbook
curl -L https://github.com/rust-lang/mdBook/releases/download/v0.4.7/mdbook-v0.4.7-x86_64-unknown-linux-gnu.tar.gz | tar xz -C $HOME/mdbook
echo "${HOME}/mdbook/" >> $GITHUB_PATH
- name: Install graphviz
run: sudo apt-get install -y graphviz
- uses: actions-rs/toolchain@v1
with:
toolchain: stable
default: true
- uses: actions-rs/cargo@v1
with:
command: install
args: --debug mdbook-graphviz
- name: Build
run: mdbook build
working-directory: doc
- uses: actions/upload-artifact@v2
with:
name: book
path: doc/book
cpp:
name: 'C++'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install doxygen
run: sudo apt-get install -y doxygen
- name: Doxygen
run: doxygen
- uses: actions/upload-artifact@v2
with:
name: cpp
path: doc/html
deploy:
name: Deploy
runs-on: ubuntu-latest
needs: [book, cpp]
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
steps:
- uses: actions/download-artifact@v2
with:
name: book
- uses: actions/download-artifact@v2
with:
name: cpp
path: api/
- uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: .
|
verilog_sources:
sim_ctrl:
files: './sim_ctrl.*v'
functional_models:
filter:
files: './my_filter.py'
|
# repo: Malcolmnixon/MotionFpga
# file: fpga/common/vhdltest.yaml
files:
# Source files
- utility/source/blink.vhd
- utility/source/clk_div_n.vhd
- utility/source/edge_detect.vhd
- utility/source/pwm.vhd
- utility/source/sdm.vhd
- utility/source/quad_decoder.vhd
- utility/source/delay_line.vhd
- utility/source/level_filter.vhd
- utility/source/step_generator.vhd
- devices/source/pwm_device.vhd
- devices/source/sdm_device.vhd
- devices/source/gpio_device.vhd
- devices/source/drv8711_spi.vhd
- communications/source/spi_slave.vhd
# Simulation files
- utility/sim/sim_on_percent.vhd
- utility/sim/sim_edge_count.vhd
- utility/sim/clk_div_n_tb.vhd
- utility/sim/edge_detect_tb.vhd
- utility/sim/pwm_tb.vhd
- utility/sim/sdm_tb.vhd
- utility/sim/quad_decoder_tb.vhd
- utility/sim/delay_line_tb.vhd
- utility/sim/level_filter_tb.vhd
- utility/sim/step_generator_tb.vhd
- devices/sim/pwm_device_tb.vhd
- devices/sim/sdm_device_tb.vhd
- devices/sim/gpio_device_tb.vhd
- devices/sim/drv8711_spi_tb.vhd
- communications/sim/sim_spi_master.vhd
- communications/sim/spi_slave_tb.vhd
tests:
# Testbenches
- clk_div_n_tb
- edge_detect_tb
- pwm_tb
- sdm_tb
- quad_decoder_tb
- delay_line_tb
- level_filter_tb
- step_generator_tb
- pwm_device_tb
- sdm_device_tb
- gpio_device_tb
- drv8711_spi_tb
- spi_slave_tb
|
# Check NEORV32 software framework and test processor
name: Processor
on:
push:
branches:
- main
paths:
- 'rtl/**'
- 'sw/**'
- 'sim/**'
pull_request:
branches:
- main
paths:
- 'rtl/**'
- 'sw/**'
- 'sim/**'
workflow_dispatch:
jobs:
Linux:
runs-on: ubuntu-latest
name: '🐧 Ubuntu | Shell script'
steps:
- name: '🧰 Repository Checkout'
uses: actions/checkout@v2
- name: '⚙️ Build Software Framework Tests'
uses: docker://ghcr.io/stnolting/neorv32/sim
with:
args: ./do.py BuildAndInstallSoftwareFrameworkTests
- name: '🚧 Run Processor Hardware Tests with shell script'
uses: docker://ghcr.io/stnolting/neorv32/sim
with:
args: ./sim/simple/ghdl.sh
VUnit-Container:
runs-on: ubuntu-latest
name: '🛳️ Container | VUnit'
steps:
- name: '🧰 Repository Checkout'
uses: actions/checkout@v2
- name: '⚙️ Build and install Processor Check software'
uses: docker://ghcr.io/stnolting/neorv32/sim
with:
args: >-
make -C sw/example/processor_check
clean_all
USER_FLAGS+=-DRUN_CHECK
USER_FLAGS+=-DUART0_SIM_MODE
USER_FLAGS+=-DSUPPRESS_OPTIONAL_UART_PRINT
MARCH=rv32imc
info
all
- name: '📤 Archive Processor Check application image'
uses: actions/upload-artifact@v2
with:
name: application
path: rtl/core/neorv32_application_image.vhd
- name: '🚧 Run Processor Hardware Tests with VUnit'
uses: VUnit/vunit_action@master
with:
image: ghcr.io/stnolting/neorv32/sim
cmd: ./sim/run.py --ci-mode -v
Windows:
runs-on: windows-latest
strategy:
fail-fast: false
matrix:
include: [
{icon: '⬛', installs: 'MINGW32' },
{icon: '🟦', installs: 'MINGW64' },
]
name: '${{ matrix.icon }} ${{ matrix.installs }} | VUnit'
defaults:
run:
shell: msys2 {0}
steps:
- name: '⚙️ git config'
run: git config --global core.autocrlf input
shell: bash
- name: '🧰 Checkout'
uses: actions/checkout@v2
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0
- name: '${{ matrix.icon }} Setup MSYS2'
uses: msys2/setup-msys2@v2
with:
msystem: ${{ matrix.installs }}
update: true
install: make
pacboy: >
ghdl:p
python-pip:p
riscv64-unknown-elf-gcc:p
- name: '⚙️ Build and install Processor Check software'
run: |
make -C sw/example/processor_check \
RISCV_PREFIX=riscv64-unknown-elf- \
clean_all \
USER_FLAGS+=-DRUN_CHECK \
USER_FLAGS+=-DUART0_SIM_MODE \
USER_FLAGS+=-DSUPPRESS_OPTIONAL_UART_PRINT \
MARCH=rv32imc \
info \
all
- name: '🐍 Install VUnit'
run: pip install vunit_hdl
- name: '🚧 Run Processor Hardware Tests with VUnit'
run: ./sim/run.py --ci-mode -v
|
# Copyright 2021 OpenHW Group
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
# Run functional regression checks
name: ci
on: [push, pull_request]
jobs:
#######
# ISA #
#######
base-isa:
name: Base ISA Tests
runs-on: ubuntu-latest
env:
RISCV: /riscv
steps:
- uses: actions/checkout@v2
with:
submodules: recursive
- name: Prepare
run: ci/setup.sh
- name: run asm tests (Write-Back Cache)
run: make run-asm-tests-verilator defines=WB_DCACHE
- name: run asm tests (Write-through Cache)
run: make run-asm-tests-verilator defines=WT_DCACHE
mul-isa:
name: Mul/Div ISA Tests
runs-on: ubuntu-latest
env:
RISCV: /riscv
steps:
- uses: actions/checkout@v2
with:
submodules: recursive
- name: Prepare
run: ci/setup.sh
- name: run mul tests (Write-Back Cache)
run: make run-mul-verilator defines=WB_DCACHE
- name: run mul tests (Write-through Cache)
run: make run-mul-verilator defines=WT_DCACHE
amo-isa:
name: Atomic ISA Tests
runs-on: ubuntu-latest
env:
RISCV: /riscv
steps:
- uses: actions/checkout@v2
with:
submodules: recursive
- name: Prepare
run: ci/setup.sh
- name: run amo tests (Write-Back Cache)
run: make run-amo-verilator defines=WB_DCACHE
- name: run amo tests (Write-through Cache)
run: make run-amo-verilator defines=WT_DCACHE
fp-isa:
name: Floating-point ISA Tests
runs-on: ubuntu-latest
env:
RISCV: /riscv
steps:
- uses: actions/checkout@v2
with:
submodules: recursive
- name: Prepare
run: ci/setup.sh
- name: run fp tests (Write-through Cache)
run: make run-fp-verilator defines=WT_DCACHE
- name: run fp tests (Write-Back Cache)
run: make run-fp-verilator defines=WB_DCACHE
##############
# Benchmarks #
##############
benchmarks:
name: Benchmarks
runs-on: ubuntu-latest
env:
RISCV: /riscv
steps:
- uses: actions/checkout@v2
with:
submodules: recursive
- name: Prepare
run: ci/setup.sh
- name: run riscv benchmarks (Write-Back Cache)
run: make run-benchmarks-verilator defines=WB_DCACHE
- name: run riscv benchmarks (Write-through Cache)
run: make run-benchmarks-verilator defines=WT_DCACHE
|
format_version: v1.0
type: UniqueComponent
attributes: {id: FreeRTOS}
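# Assumption: 'Dynamic' entries below carry tool-derived default values, while 'User' entries carry manual overrides.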
children:
- type: Symbols
children:
- type: Integer
attributes: {id: FREERTOS_CPU_CLOCK_HZ}
children:
- type: Values
children:
- type: Dynamic
attributes: {id: FreeRTOS, value: '48000000'}
- type: Boolean
attributes: {id: FREERTOS_DAEMON_TASK_STARTUP_HOOK}
children:
- type: Values
children:
- type: Dynamic
attributes: {id: FreeRTOS, value: 'false'}
- type: User
attributes: {value: 'false'}
- type: Boolean
attributes: {id: FREERTOS_GENERATE_RUN_TIME_STATS}
children:
- type: Values
children:
- type: User
attributes: {value: 'false'}
- type: Boolean
attributes: {id: FREERTOS_INCLUDE_UXTASKGETSTACKHIGHWATERMARK}
children:
- type: Values
children:
- type: User
attributes: {value: 'false'}
- type: Integer
attributes: {id: FREERTOS_PERIPHERAL_CLOCK_HZ}
children:
- type: Values
children:
- type: Dynamic
attributes: {id: FreeRTOS, value: '48000000'}
- type: Integer
attributes: {id: FREERTOS_TIMER_QUEUE_LENGTH}
children:
- type: Values
children:
- type: Dynamic
attributes: {id: FreeRTOS, value: '0'}
- type: Integer
attributes: {id: FREERTOS_TIMER_TASK_PRIORITY}
children:
- type: Values
children:
- type: Dynamic
attributes: {id: FreeRTOS, value: '0'}
- type: User
attributes: {value: '3'}
- type: Integer
attributes: {id: FREERTOS_TIMER_TASK_STACK_DEPTH}
children:
- type: Values
children:
- type: Dynamic
attributes: {id: FreeRTOS, value: '0'}
- type: Integer
attributes: {id: FREERTOS_TOTAL_HEAP_SIZE}
children:
- type: Values
children:
- type: User
attributes: {value: '20480'}
- type: Boolean
attributes: {id: FREERTOS_USE_STATS_FORMATTING_FUNCTIONS}
children:
- type: Values
children:
- type: User
attributes: {value: 'false'}
- type: Boolean
attributes: {id: FREERTOS_USE_TIMERS}
children:
- type: Values
children:
- type: User
attributes: {value: 'false'}
- type: Boolean
attributes: {id: FREERTOS_USE_TRACE_FACILITY}
children:
- type: Values
children:
- type: User
attributes: {value: 'false'}
- type: ElementPosition
attributes: {x: '320', y: '20', id: FreeRTOS}
|
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
# Azure template for installing dependencies from various package managers,
# necessary for building, testing, and packaging OpenTitan.
#
# This template executes:
# - apt-get install for all packages listed in apt-requirements.txt
# - pip install for all packages listed in python-requirements.txt
steps:
- bash: |
set -e
# NOTE: We use sed to remove all comments from apt-requirements.txt,
# since apt-get doesn't actually provide such a feature.
sed 's/#.*//' apt-requirements.txt \
| xargs sudo apt-get install -y
sudo pip3 install -U -r python-requirements.txt
displayName: 'Install package dependencies'
|
<reponame>hito0512/Vitis-AI
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: license plate recognition model based on inception-v1.
input size: 96*288
float ops: 1.75G
task: classification
framework: caffe
prune: 'no'
version: 2.0
files:
- name: cf_plate-recognition_96_288_1.75G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=cf_plate-recognition_96_288_1.75G_2.0.zip
checksum: 856aae1f27a2ec6e2013d703dcfec63b
- name: plate_num
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=plate_num-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: 3f257d6f94d3adf8d6dfad694c18abfb
- name: plate_num
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=plate_num-vck190-r2.0.0.tar.gz
checksum: abb48163be14c2da683b2018cf02ae30
- name: plate_num
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=plate_num-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: 5d5c38a924c561c30dee97f84ca3cd1d
- name: plate_num
type: xmodel
board: vck50008pe-DPUCVDX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=plate_num-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz
checksum: 83be424db087b680830af666ba011d68
- name: plate_num
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=plate_num-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: a04af7831264ee5bfb9dbb9c4efd6ae7
- name: plate_num
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=plate_num-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: 2a6159d97a8507230524c3bef723014e
- name: plate_num
type: xmodel
board: u200-DPUCADF8H & u250-DPUCADF8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=plate_num-u200-u250-r2.0.0.tar.gz
checksum: 1bdb1a1cd2961a8b21336e67dd7606f8
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
<filename>circle.yml
machine:
services:
- docker
dependencies:
pre:
- docker pull dhiru/fedora
- docker run --name fedora -v /home/ubuntu:/base -i -t dhiru/fedora /base/JohnTheRipper/src/CircleCI-MinGW.sh
general:
artifacts:
- /home/ubuntu/builds
|
- src: docker.io/calico/node
dst: asia.gcr.io/fighting-falcon/calico/node
tags: ["v3.12.0"]
- src: docker.io/mvance/unbound
dst: registry.f110.dev/mirror/unbound
tags: ["1.9.5"]
- src: docker.io/goharbor/harbor-db
dst: asia.gcr.io/fighting-falcon/goharbor/harbor-db
tags: ["v1.10.0", "v2.1.1"]
- src: ghcr.io/f110/bazel-container
dst: registry.f110.dev/mirror/bazel
tags: ["3.5.1"]
|
steps:
- label: "test"
command: |
# set up environment
source /etc/environment
export FPGA_BOARD=xilinx.com:zc702:part0:1.4
export FPGA_PART=xc7z020clg484-1
export TAP_CORE_LOC=/tools/synopsys/syn/L-2016.03-SP5-5/dw/syn_ver/tap_core.edf
printenv
# create virtual environment
python3.7 -m venv venv
source venv/bin/activate
# run regression script
source regress.sh
# deactivate virtual environment
deactivate
artifact_paths:
- "project/project.runs/*/*.log"
- "project/project.runs/*/*.bit"
- "project/project.runs/*/*.tcl"
- "project/project.runs/*/*.xsa"
- "project/project.sdk/*/*/*.elf"
timeout_in_minutes: 60
agents:
fpga_verif: "true"
|
configMapGenerator:
- name: xss-bot-healthcheck-config
files:
- env
generatorOptions:
disableNameSuffixHash: true
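  # disabling the content-hash suffix keeps the ConfigMap name stable across content changes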
labels:
type: generated
annotations:
note: generated
|
<filename>rtl/apb_gpio/Bender.yml
package:
name: apb_gpio
authors: [ "<NAME> <<EMAIL>>" ]
sources:
- rtl/apb_gpio.sv
|
<reponame>fabianschuiki/sv-tests
name: sv-test-env
channels:
- symbiflow
- pkgw-forge
- conda-forge
dependencies:
# - iverilog
# - moore
# - odin_ii
# - slang
# - surelog
# - sv-parser
# - tree-sitter-verilog
# - verible
# - verilator
# - yosys
# - zachjs-sv2v
- python=3.7
- pip
- pip: # Packages installed from PyPI
- -r file:requirements.txt
|
name: Windows
on:
schedule:
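    # every Friday at 00:00 UTC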
- cron: '0 0 * * 5'
workflow_dispatch:
jobs:
Matrix:
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.generate.outputs.matrix }}
steps:
- name: '🧰 Repository Checkout'
uses: actions/checkout@v2
- name: '🔧 Generate examples matrix'
id: generate
run: ./.github/generate-job-matrix.py
Implementation:
needs: Matrix
runs-on: windows-latest
strategy:
fail-fast: false
matrix:
include: ${{ fromJson(needs.Matrix.outputs.matrix) }}
name: '🟦 MINGW64 | ${{ matrix.board }} · ${{ matrix.design }}'
defaults:
run:
shell: msys2 {0}
steps:
- name: '🟦 Setup MSYS2'
uses: msys2/setup-msys2@v2
with:
msystem: MINGW64
update: true
install: >
make
mingw-w64-x86_64-yosys
mingw-w64-x86_64-nextpnr
mingw-w64-x86_64-icestorm
mingw-w64-x86_64-prjtrellis
- name: '⚙️ git config'
run: git config --global core.autocrlf input
shell: bash
- name: '🧰 Checkout'
uses: actions/checkout@v2
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0
- name: '🚧 Generate ${{ matrix.board }} ${{ matrix.design }} bitstream'
run: make -C setups/osflow BOARD=${{ matrix.board }} ${{ matrix.design }}
Processor:
runs-on: windows-latest
strategy:
fail-fast: false
matrix:
include: [
{icon: '⬛', installs: 'MINGW32', arch: i686, pkg: 'mcode' },
{icon: '🟦', installs: 'MINGW64', arch: x86_64, pkg: 'llvm' },
]
name: '${{ matrix.icon }} ${{ matrix.installs }} | VUnit'
defaults:
run:
shell: msys2 {0}
steps:
- name: '⚙️ git config'
run: git config --global core.autocrlf input
shell: bash
- name: '🧰 Checkout'
uses: actions/checkout@v2
with:
# The command 'git describe' (used for version) needs the history.
fetch-depth: 0
- name: '${{ matrix.icon }} Setup MSYS2'
uses: msys2/setup-msys2@v2
with:
msystem: ${{ matrix.installs }}
update: true
install: >
make
mingw-w64-${{ matrix.arch }}-ghdl-${{ matrix.pkg }}
mingw-w64-${{ matrix.arch }}-python-pip
mingw-w64-${{ matrix.arch }}-riscv64-unknown-elf-gcc
- name: '⚙️ Build and install Processor Check software'
run: |
make -C sw/example/processor_check \
RISCV_PREFIX=riscv64-unknown-elf- \
clean_all \
USER_FLAGS+=-DRUN_CHECK \
USER_FLAGS+=-DUART0_SIM_MODE \
USER_FLAGS+=-DSUPPRESS_OPTIONAL_UART_PRINT \
MARCH=rv32imac \
info \
all
- name: '🐍 Install VUnit'
run: pip install vunit_hdl
- name: '🚧 Run Processor Hardware Tests with VUnit'
run: ./sim/run.py --ci-mode -v
|
working_dir : # Module data will be written to this path. If not given, use path of this file. Default: not given / empty
dut : dut_mio.yaml # DUT hardware configuration (.yaml). Change to dut_mio_gpac.yaml for GPAC support.
dut_configuration : dut_configuration_mio.yaml # DUT init configuration (.yaml). Change to dut_configuration_mio_gpac.yaml for GPAC support.
configuration : # FE configuration file, text (.cfg) or HDF5 (.h5) file. If not given, latest valid configuration (run status FINISHED) will be taken. If a number is given, configuration from run with specified number will be taken.
flavor : fei4a # FEI4 flavor/type for initial configuration. Valid values: 'fei4a' or 'fei4b'
chip_address : # Chip Address for initial configuration, if not given, broadcast bit will be set
module_id : module_test # module identifier / name, sub-folder with given name will be created inside working_dir
# *** global run configuration ***
#
#run_conf:
# send_data : 'tcp://127.0.0.1:5678'
# comment : ''
# reset_rx_on_error : False
#
# *** scan specific run configuration ***
#
#Fei4Tuning:
# enable_shift_masks : ["Enable", "C_Low", "C_High"]
# target_threshold : 50 # target threshold
# target_charge : 280 # target charge
# target_tot : 5 # target ToT
# global_iterations : 4
# local_iterations : 3
#
#AnalogScan:
# scan_parameters : {'PlsrDAC': 280}
# enable_shift_masks : ["Enable", "C_Low", "C_High"]
#
#ThresholdScan:
# scan_parameters : {'PlsrDAC': [0, 100]}
# enable_shift_masks : ["Enable", "C_Low", "C_High"]
#
|
# For Windows based CI
environment:
matrix:
# For Python versions available on Appveyor, see
# https://www.appveyor.com/docs/windows-images-software/#python
- BUILD_NAME: py38-unit
PYTHON: "C:\\Python38"
- BUILD_NAME: py38-lint
PYTHON: "C:\\Python38"
- BUILD_NAME: py38-docs
PYTHON: "C:\\Python38"
- BUILD_NAME: py38-acceptance-ghdl
PYTHON: "C:\\Python38"
- BUILD_NAME: py38-vcomponents-ghdl
PYTHON: "C:\\Python38"
install:
- "git submodule update --init --recursive"
- "%PYTHON%\\python.exe -m pip install -U pip"
- "%PYTHON%\\python.exe -m pip install -U virtualenv"
- "%PYTHON%\\python.exe -m pip install tox"
- "curl -fsSL -o ghdl.zip https://github.com/ghdl/ghdl/releases/download/v0.36/ghdl-0.36-mingw32-mcode.zip"
- "7z x ghdl.zip -o../ghdl -y"
- "mv ../ghdl/GHDL/0.36-mingw32-mcode/ ../ghdl-v0.36"
- "rm -rf ../ghdl ghdl.zip"
- "set PATH=%PATH%;../ghdl-v0.36/bin"
build: off
test_script:
- "%PYTHON%\\python.exe -m tox -e %BUILD_NAME%"
|
name: Implementation
on:
push:
paths:
- 'setups/**'
- 'rtl/**'
pull_request:
paths:
- 'setups/**'
- 'rtl/**'
schedule:
- cron: '0 0 * * 5'
workflow_dispatch:
jobs:
Matrix:
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.generate.outputs.matrix }}
steps:
- name: '🧰 Repository Checkout'
uses: actions/checkout@v2
- name: '🔧 Generate examples matrix'
id: generate
run: ./.github/generate-job-matrix.py
All-in-one:
needs: Matrix
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
include: ${{ fromJson(needs.Matrix.outputs.matrix) }}
name: '🛳️ All-in-one | ${{ matrix.board }} · ${{ matrix.design }}'
env:
GHDL_PLUGIN_MODULE: ghdl
steps:
- name: '🧰 Repository Checkout'
uses: actions/checkout@v2
- name: '⚙️ Generate ${{ matrix.board }} ${{ matrix.design }} bitstream'
uses: docker://ghcr.io/hdl/debian-buster/impl
with:
args: make -C setups/examples BOARD=${{ matrix.board }} ${{ matrix.design }}
- name: '📤 Upload Artifact: ${{ matrix.board }} ${{ matrix.design }} bitstream and reports'
uses: actions/upload-artifact@v2
with:
name: ${{ matrix.board }}-${{ matrix.design }}
path: |
setups/examples/${{ matrix.bitstream }}
setups/osflow/${{ matrix.board }}/*-report.txt
|
<filename>.github/workflows/fixed_point_default.yml
name: Fixed Point Default
on:
push:
paths:
- 'fixed_point/**'
- '!fixed_point/README.md'
- '.github/workflows/fixed_point_default.yml'
defaults:
run:
working-directory: fixed_point
shell: bash
jobs:
cppcheck:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- name: Install cppcheck
run: sudo apt-get install cppcheck -y
- name: Run an analysis of the code
run: make check
build:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- name: Install googletest, google-mock
run: >
sudo apt-get install -y
googletest
libgtest-dev
google-mock
libgmock-dev
- name: Compile the code
run: make -j
- name: Run the Unit Tests
run: make test
|
name: sim-gl-compile
parameters:
sdf: False
PWR_AWARE: False
tool: "XCELIUM"
num_glb_tiles: 16
commands:
- bash run_sim.sh
inputs:
- adk
- design.vcs.v
- design.vcs.pg.v
- header
- design.sdf
- glb_tile.vcs.v
- glb_tile.vcs.pg.v
- glb_tile.sdf
- glb_tile_sram.v
- glb_tile_sram_pwr.v
outputs:
- sim.log
postconditions:
- assert File( 'outputs/sim.log' ) # must exist
# Basic error checking
- assert 'Error,' not in File( 'outputs/sim.log' )
- assert '*E,' not in File( 'outputs/sim.log' )
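  # ('Error,' and '*E,' are the error-message prefixes emitted by Xcelium)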
|
<filename>documentation/metadata/an001.yaml
---
title: The SPU Assembly Language
author:
- Felix "xq" Queißner
date: April 28, 2020
abstract: |
  The highly flexible nature of the instruction set of the *SPU Mark II* makes it
  difficult to map every possible instruction onto a useful mnemonic. The assembly
  language therefore allows the coder to attach modifiers to each instruction.
  These modifiers can change any field of the instruction they are applied to.
|
<filename>examples/mercury_pe1_zx1/dma_example/config.yml
---
name: PE1_XZ1_7030_DMA
board: boards/mercury_zx1_pe1
version: 0.1.1
cores:
- fpga/cores/axi_ctl_register_v1_0
- fpga/cores/axi_sts_register_v1_0
- fpga/cores/tlast_gen_v1_0
- fpga/cores/dna_reader_v1_0
memory:
- name: control
offset: '0x60000000'
range: 4K
- name: status
offset: '0x50000000'
range: 4K
- name: xadc
offset: '0x43C00000'
range: 64K
- name: ram_mm2s
offset: '0x30000000'
range: 128M
- name: ram_s2mm
offset: '0x38000000'
range: 128M
- name: dma
offset: '0x80000000'
range: 64K
- name: axi_hp0
offset: '0xF8008000'
range: 4K
- name: axi_hp2
offset: '0xF800A000'
range: 4K
- name: ocm_mm2s
offset: '0xFFFF0000'
range: 32K
- name: ocm_s2mm
offset: '0xFFFF8000'
range: 32K
- name: sclr
offset: '0xF8000000'
range: 64K
- name: SDRAM
offset: '0xA0000000'
range: 256M
control_registers:
- led
status_registers:
- forty_two
parameters:
fclk0: 125000000 # FPGA clock speed in Hz
fclk1: 100000000 # FPGA clock speed in Hz
xdc:
- boards/mercury_zx1_pe1/config/ports.xdc
- boards/mercury_zx1_pe1/config/pcie.xdc
- ./constraints.xdc
drivers:
- server/drivers/common.hpp
- ./dma_example.hpp
web:
- ./web/index.html
- web/koheron.ts
- web/led-blinker.ts
- ./web/app.ts
- web/main.css
|
<filename>vlog_netlist/fll/fll_design/rtl/configure.yml
name: rtl
commands:
- |
mkdir -p outputs
cp -r $VLOG_HOME/mux_4t1 ./outputs
cat "./outputs/mux_4t1/qr_4t1_mux_top.sv" "./outputs/mux_4t1/qr_mux_fixed.sv" "./outputs/mux_4t1/ff_c.sv" "./outputs/mux_4t1/tx_inv.sv" > "./outputs/design.v"
python gen_tcl.py
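    # assumption: gen_tcl.py produces read_design.tcl, the second declared output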
outputs:
- design.v
- read_design.tcl
|
---
name: phase-noise-analyzer
board: boards/alpha250
version: 0.1.1
cores:
- fpga/cores/axi_ctl_register_v1_0
- fpga/cores/axi_sts_register_v1_0
- fpga/cores/dna_reader_v1_0
- fpga/cores/axis_constant_v1_0
- fpga/cores/latched_mux_v1_0
- fpga/cores/edge_detector_v1_0
- fpga/cores/comparator_v1_0
- fpga/cores/tlast_gen_v1_0
- fpga/cores/unrandomizer_v1_0
- boards/alpha250/cores/precision_dac_v1_0
- boards/alpha250/cores/spi_cfg_v1_0
- fpga/cores/axis_lfsr_v1_0
- fpga/cores/phase_unwrapper_v1_0
- fpga/cores/boxcar_filter_v1_0
memory:
- name: control
offset: '0x40000000'
range: 4K
- name: status
offset: '0x50000000'
range: 4K
- name: xadc
offset: '0x43C00000'
range: 64K
- name: ram
offset: '0x1E000000'
range: 32M
- name: dma
offset: '0x80000000'
range: 64K
- name: axi_hp0
offset: '0xF8008000'
range: 4K
control_registers:
- mmcm
- precision_dac_ctl
- precision_dac_data[2]
- spi_cfg_data
- spi_cfg_cmd
- phase_incr[4]
- cordic
status_registers:
- spi_cfg_sts
- adc[n_adc]
parameters:
fclk0: 200000000
fclk1: 143000000
adc_clk: 200000000
dac_width: 16
adc_width: 16
n_adc: 2
cic_differential_delay: 1
cic_decimation_rate: 20
cic_n_stages: 6
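  # with R=20, M=1, N=6 the CIC DC gain is (R*M)^N = 20^6 ~ 6.4e7 (downstream scaling assumed to compensate)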
xdc:
- boards/alpha250/config/ports.xdc
drivers:
- boards/alpha250/drivers/common.hpp
- boards/alpha250/drivers/eeprom.hpp
- boards/alpha250/drivers/gpio-expander.hpp
- boards/alpha250/drivers/temperature-sensor.hpp
- boards/alpha250/drivers/clock-generator.hpp
- boards/alpha250/drivers/power-monitor.hpp
- boards/alpha250/drivers/ltc2157.hpp
- boards/alpha250/drivers/ad9747.hpp
- boards/alpha250/drivers/precision-adc.hpp
- boards/alpha250/drivers/precision-dac.hpp
- boards/alpha250/drivers/spi-config.hpp
- ./dds.hpp
- ./dma.hpp
web:
- web/index.html
- web/main.css
- web/koheron.ts
|
## Copyright lowRISC contributors.
## Licensed under the Apache License, Version 2.0, see LICENSE for details.
## SPDX-License-Identifier: Apache-2.0
Device: rv32imc
Vendor: opentitan
ISA: RV32IMC
misa:
implemented: True
MXL:
range:
rangelist: [[1]]
mode: Unchanged
Extensions:
bitmask:
mask: 0x0
default: 0x1104
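      # 0x1104 sets misa bits 2 (C), 8 (I) and 12 (M), matching the RV32IMC ISA string above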
hw_data_misaligned_support: True
mtvec:
MODE:
range:
rangelist: [[1]]
BASE:
range:
rangelist: [[0x20000020]]
mstatus:
MPP:
range:
rangelist: [[3]]
User_Spec_Version: "2.3"
Privilege_Spec_Version: "1.11"
mvendorid:
implemented: false
marchid:
implemented: false
mimpid:
implemented: false
mhartid: 0
mcycle:
is_hardwired: true
implemented: true
minstret:
is_hardwired: true
implemented: true
|
<reponame>recogni/cluster_interconnect
package:
name: cluster_interconnect
dependencies:
common_cells: { git: "https://github.com/pulp-platform/common_cells.git", rev: v1.13.0 }
sources:
# Source files grouped in levels. Files in level 0 have no dependencies on files in this
# package. Files in level 1 only depend on files in level 0, files in level 2 on files in
# levels 1 and 0, etc. Files within a level are ordered alphabetically.
# Level 0
- rtl/tcdm_interconnect/tcdm_interconnect_pkg.sv
- rtl/tcdm_interconnect/addr_dec_resp_mux.sv
- rtl/tcdm_interconnect/amo_shim.sv
# Level 1
- rtl/tcdm_interconnect/xbar.sv
# Level 2
- rtl/tcdm_interconnect/clos_net.sv
- rtl/tcdm_interconnect/bfly_net.sv
# Level 3
- rtl/tcdm_interconnect/tcdm_interconnect.sv
- target: tcdm_test
include_dirs:
- tb/common/
- tb/tb_tcdm_interconnect/hdl
files:
# Level 0
- tb/tb_tcdm_interconnect/hdl/tb_pkg.sv
# Level 2
- tb/tb_tcdm_interconnect/hdl/tcdm_interconnect_wrap.sv
# Level 2
- tb/tb_tcdm_interconnect/hdl/tb.sv
|
udma_i2s:
files: [
rtl/i2s_clk_gen.sv,
rtl/i2s_rx_channel.sv,
rtl/i2s_tx_channel.sv,
rtl/i2s_ws_gen.sv,
rtl/i2s_clkws_gen.sv,
rtl/i2s_txrx.sv,
rtl/cic_top.sv,
rtl/cic_integrator.sv,
rtl/cic_comb.sv,
rtl/pdm_top.sv,
rtl/udma_i2s_reg_if.sv,
rtl/udma_i2s_top.sv,
]
|
<gh_stars>1-10
builds:
build1:
test_discoverer:
type: uvm_test
attr:
test_dir: $JVS_PRJ_HOME/build1_testcases
build2:
|
name: CI/Console
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: ghdl/setup-ghdl-ci@nightly
with:
backend: mcode
- name: Setup Python
uses: actions/setup-python@v2
- name: Install Requirements
run: |
python -m pip install --upgrade pip
pip install setuptools wheel
pip install -r requirements.txt
- name: Build
run: |
python setup.py sdist bdist_wheel
- name: Install
run: |
python -m pip install --find-links=dist VHDLTest
- name: Run Script
run: VHDLTest --config test.yaml --log test.log --junit test.xml --exit-0
working-directory: example
|
on:
- push
name: Linux CI
jobs:
build:
name: Build (Ubuntu)
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Get LLVM and Bison
run: |
sudo apt-get install llvm-11-dev bison
echo "LLVM_CONFIG=llvm-config-11" >> $GITHUB_ENV
echo "NPROC=$(nproc)" >> $GITHUB_ENV
- name: Make
run: |
${{ env.LLVM_CONFIG }} --link-static # Is this imperative?
make -j${{ env.NPROC }} release
|
<gh_stars>1-10
KerasJson: /media/data/projects/landsat_soil_classifier/models/ANN50x50.json
KerasH5: /media/data/projects/landsat_soil_classifier/models/ANN50x50_weights.h5
InputData: /media/data/projects/landsat_soil_classifier/data/Landsat_x_test.dat
OutputPredictions: /media/data/projects/landsat_soil_classifier/data/Landsat_y_test.dat
OutputDir: /media/data/projects/landsat_soil_classifier/fpga/hls_ANN50x50
ProjectName: fpga_ANN50x50
XilinxPart: xazu7eg-fbvb900-1-i
ClockPeriod: 24
IOType: io_parallel
HLSConfig:
Model:
Precision: ap_fixed<16,10>
ReuseFactor: 4
|
<filename>mie-workflows/vod_translate_workflow.yaml
AWSTemplateFormatVersion: '2010-09-09'
Description: "Media Insights Engine - Workflow to run VOD transcribe and translate application"
Parameters:
WorkflowCustomResourceArn:
Type: String
Description: "ARN of the Media Insights custom resource that handles creating operations, stages and workflows"
OperatorLibraryStack:
Description: "Name of the operator library stack"
Type: String
Resources:
# Stages
MediaconvertStageT:
Type: Custom::CustomResource
Properties:
ServiceToken: !Ref WorkflowCustomResourceArn
ResourceType: "Stage"
Name: "MediaconvertStageT"
Operations:
- Fn::ImportValue:
Fn::Sub: "${OperatorLibraryStack}:Mediaconvert"
TranscribeStageT:
Type: Custom::CustomResource
Properties:
ServiceToken: !Ref WorkflowCustomResourceArn
ResourceType: "Stage"
Name: "TranscribeStageTT"
Operations:
- Fn::ImportValue:
Fn::Sub: "${OperatorLibraryStack}:TranscribeVideo"
WebCaptionsStageT:
Type: Custom::CustomResource
Properties:
ServiceToken: !Ref WorkflowCustomResourceArn
ResourceType: "Stage"
Name: "WebCaptionsStageT"
Operations:
- Fn::ImportValue:
Fn::Sub: "${OperatorLibraryStack}:WebCaptions"
CaptionEditingWaitStageT:
Type: Custom::CustomResource
Properties:
ServiceToken: !Ref WorkflowCustomResourceArn
ResourceType: "Stage"
Name: "CaptionEditingWaitStageT"
Operations:
- Fn::ImportValue:
Fn::Sub: "${OperatorLibraryStack}:Wait"
TranslateStageT:
Type: Custom::CustomResource
Properties:
ServiceToken: !Ref WorkflowCustomResourceArn
ResourceType: "Stage"
Name: "TranslateStageT"
Operations:
- Fn::ImportValue:
Fn::Sub: "${OperatorLibraryStack}:Translate"
- Fn::ImportValue:
Fn::Sub: "${OperatorLibraryStack}:TranslateWebCaptions"
# TranslateBatchStage:
# Type: Custom::CustomResource
# Properties:
# ServiceToken: !Ref WorkflowCustomResourceArn
# ResourceType: "Stage"
# Name: "TranslateBatchStage"
# Operations:
# - Fn::ImportValue:
# Fn::Sub: "${OperatorLibraryStack}:TranslateWebCaptions"
CaptionFileStageT:
Type: Custom::CustomResource
Properties:
ServiceToken: !Ref WorkflowCustomResourceArn
ResourceType: "Stage"
Name: "CaptionFileStageT"
Operations:
- Fn::ImportValue:
Fn::Sub: "${OperatorLibraryStack}:WebToSRTCaptions"
- Fn::ImportValue:
Fn::Sub: "${OperatorLibraryStack}:WebToVTTCaptions"
# Workflow
TranslatorWorkflow:
DependsOn:
- MediaconvertStageT
- TranscribeStageT
- WebCaptionsStageT
# - TranslateBatchStage
- CaptionEditingWaitStageT
- TranslateStageT
- CaptionFileStageT
Type: Custom::CustomResource
Properties:
ServiceToken: !Ref WorkflowCustomResourceArn
ResourceType: "Workflow"
Name: "TranslatorWorkflow"
StartAt: !GetAtt MediaconvertStageT.Name
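      # list-form !Sub: the first element is the JSON template, the second maps each ${placeholder} to a stage name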
Stages: !Sub
- |-
{
"${MediaconvertStageT}":{
"Next": "${TranscribeStageT}"
},
"${TranscribeStageT}":{
"Next": "${WebCaptionsStageT}"
},
"${WebCaptionsStageT}":{
"Next": "${CaptionEditingWaitStageT}"
},
"${CaptionEditingWaitStageT}":{
"Next": "${TranslateStageT}"
},
"${TranslateStageT}":{
"Next": "${CaptionFileStageT}"
},
"${CaptionFileStageT}":{
"End": true
}
}
- {
MediaconvertStageT: !GetAtt MediaconvertStageT.Name,
TranscribeStageT: !GetAtt TranscribeStageT.Name,
WebCaptionsStageT: !GetAtt WebCaptionsStageT.Name,
CaptionEditingWaitStageT: !GetAtt CaptionEditingWaitStageT.Name,
TranslateStageT: !GetAtt TranslateStageT.Name,
CaptionFileStageT: !GetAtt CaptionFileStageT.Name
}
|
<reponame>lrsb/digitallogicdesign-vhdl-2019<gh_stars>0
language: python
addons:
apt:
update: true
install:
- sudo apt-get install -y gnat
- git clone https://github.com/ghdl/ghdl.git
- mkdir $TRAVIS_BUILD_DIR/ghdl/build
- cd $TRAVIS_BUILD_DIR/ghdl/build
- $TRAVIS_BUILD_DIR/ghdl/configure --prefix=$TRAVIS_BUILD_DIR/ghdl/
- make
- make install
- pip install vunit_hdl
- chmod +x $TRAVIS_BUILD_DIR/src/run.py
- export PATH="$TRAVIS_BUILD_DIR/ghdl/bin:$PATH"
before_script:
- cd $TRAVIS_BUILD_DIR/src
- gcc test_gen.c -o test_gen -std=c99
- ./test_gen --random-cases=200
script:
- python $TRAVIS_BUILD_DIR/src/run.py
|
# This is an example of how an ENC IP is used in our YAML
IR_RECIEVE:
IP: __IR_RECIEVE
CMD: IRR
ir_sda:
PIN: A34
DIRECTION: in
ir_recieve_status:
PIN: A35
DIRECTION: out
|
<filename>Bender.yml
package:
name: per2axi
dependencies:
axi_slice: { git: "https://github.com/pulp-platform/axi_slice.git", version: 1.1.3 }
sources:
- src/per2axi_busy_unit.sv
- src/per2axi_req_channel.sv
- src/per2axi_res_channel.sv
- src/per2axi.sv
|
name: tiny-mlperf-fifo-env-take-2
channels:
- conda-forge
dependencies:
- python=3.7
- pip
- pip:
- jupyter
- pydot
- graphviz
- git+https://github.com/nicologhielmetti/hls4ml.git@fifo_depth_opt#egg=hls4ml[profiling]
- matplotlib
- numpy
- PyYAML
- scikit-learn
- tensorflow
- tqdm
- git+https://github.com/google/qkeras#egg=qkeras
- keras-flops
- git+https://github.com/kentaroy47/keras-Opcounter
- keras-tuner
- setgpu
- mplhep
|
<filename>Bender.yml
package:
name: fpu_interco
authors:
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
dependencies:
fpnew: { git: "https://github.com/pulp-platform/fpnew.git", version: 0.6.4 }
riscv: { git: "<EMAIL>:AlSaqr-platform/riscv_nn.git", rev: "92506f3351c18edf53758add7dd5a4fd69b7c8c1"}
sources:
# Source files grouped in levels. Files in level 0 have no dependencies on files in this
# package. Files in level 1 only depend on files in level 0, files in level 2 on files in
# levels 1 and 0, etc. Files within a level are ordered alphabetically.
# Level 0
- FP_WRAP/fp_iter_divsqrt_msv_wrapper_2_STAGE.sv
- FP_WRAP/fpnew_wrapper.sv
- RTL/AddressDecoder_Resp_FPU.sv
- RTL/FanInPrimitive_Req_FPU.sv
- RTL/FanInPrimitive_Resp_FPU.sv
- RTL/FPU_clock_gating.sv
- RTL/fpu_demux.sv
- RTL/LFSR_FPU.sv
- RTL/optimal_alloc.sv
- RTL/RR_Flag_Req_FPU.sv
# Level 1
- RTL/AddressDecoder_Req_FPU.sv
- RTL/ArbitrationTree_FPU.sv
- RTL/RequestBlock_FPU.sv
- RTL/ResponseTree_FPU.sv
# Level 2
- RTL/ResponseBlock_FPU.sv
# Level 3
- RTL/XBAR_FPU.sv
# Level 4
- RTL/shared_fpu_cluster.sv
|
<filename>.github/workflows/atomsim-atombones.yml
name: atomsim-atombones
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
build:
name: atomsim-atombones
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
with:
path: riscv-atom
- name: Install Verilator
run:
sudo apt-get install verilator
- name: Set RVATOM directory
run:
echo "RVATOM=$GITHUB_WORKSPACE/riscv-atom" >> $GITHUB_ENV
- name: Build AtomSim for AtomBones Target
run:
cd riscv-atom && make Target=atombones sim
|
<reponame>SubjeBilisim/anasymod
analog_ctrl_inputs:
in_:
abspath: 'tb_i.in_'
range: 10
init_value: 0.0
analog_ctrl_outputs:
out:
abspath: 'tb_i.out'
range: 10
|
<filename>hw/vendor/pulp_platform_apb_uart/Bender.yml
package:
name: apb_uart
authors: [
"<NAME>",
"<NAME>"
]
sources:
- src/slib_clock_div.sv
- src/slib_counter.sv
- src/slib_edge_detect.sv
- src/slib_fifo.sv
- src/slib_input_filter.sv
- src/slib_input_sync.sv
- src/slib_mv_filter.sv
- src/uart_baudgen.sv
- src/uart_interrupt.sv
- src/uart_receiver.sv
- src/uart_transmitter.sv
- src/apb_uart.sv
- src/apb_uart_wrap.sv
|
name: ICan'tBelieveIt'sNotCrypto
# Long form description.
description: |+
Every journey starts with a single step
Guest challenge by TethysSvensson
# The flag
flag: CTF{5t3p_by_st3p_I_m4k3_my_w4y}
# Task category. (one of hw, crypto, pwn, rev, web, net, misc)
category: misc
# === the fields below will be filled by SRE or automation ===
# Task label
label: ''
# URL for web challenges
link: ''
# host/port for non-web challenges
host: 'steps.2021.ctfcompetition.com 1337'
# the URL for attachments, to be filled in by automation
attachment: ''
# is this challenge released? Will be set by SREs
visible: false
|
<reponame>bver/GERET<gh_stars>1-10
---
algorithm:
class: ParetoGPSimplified
population_size: 800
archive_size: 200
generations_per_cascade: 10
mutation_probability: 0.00
shorten_individual: true
tournament_size: 10
init:
method: ramped # grow or full or ramped
sensible_depth: 7
termination:
max_steps: 1000
on_individual: :stopping_condition
grammar:
class: Abnf::File
filename: sample/santa_fe_ant_trail/grammar.abnf
mapper:
class: DepthLocus
crossover:
class: CrossoverRipple
margin: 2 #1
step: 2
mutation:
class: MutationRipple
store:
class: Store
filename: ./ant_paretogp_simplified.store
report:
class: AntReport
require: sample/santa_fe_ant_trail/ant_report.rb
individual:
class: AntIndividualMultiObjective
require: sample/santa_fe_ant_trail/ant_individual.rb
shorten_chromozome: false
|
<gh_stars>0
package:
name: fw-wishbone-interconnect
version: None
deps:
- name: fwprotocol-defs
url: https://github.com/featherweight-ip/fwprotocol-defs.git
dev-deps:
- name: fwprotocol-defs
url: https://github.com/featherweight-ip/fwprotocol-defs.git
- name: fw-wishbone-sram-ctrl
url: https://github.com/featherweight-ip/fw-wishbone-sram-ctrl.git
- name: cocotb
src: pypi
- name: vlsim
src: pypi
- name: assertpy
src: pypi
- name: mkdv
url: https://github.com/fvutils/mkdv.git
- name: pybfms
src: pypi
- name: pybfms-wishbone
url: https://github.com/pybfms/pybfms-wishbone.git
|