Search is not available for this dataset
content
stringlengths 0
376M
|
---|
SPI_SLAVE_FREERTOS:
IP: __SPI_SLAVE_FREERTOS
CMD: SPS
ALT: a
ss_sck_freertos:
PIN: A34
DIRECTION: IN
ss_mosi_freertos:
PIN: A36
DIRECTION: IN
ss_ssel_freertos:
PIN: A37
DIRECTION: IN
ss_miso_freertos:
PIN: A38
DIRECTION: OUT
spi_slave_result_freertos:
PIN: A39
DIRECTION: OUT
|
pip_test:
- src_wire: R3C3_PLC.PLC/JDI0_SLICEA
dst_wire: R3C3/JF0
bel_pin_test:
- bel: R7C3_PLC.PLC/SLICEA_LUT0
pin: D
wire: R7C3_PLC.PLC/JD0_SLICEA
|
#Descriptions:
#CMD: cpld command
#A: Assitant Board
#T: Target Board
#FUNC0: the function of this pin connection
#DIRECTION: A2T T2A T2T
#FRDM A-B-C-D DEFINITION
#| NAME | SIZE | CPLD_SCH | FRDM_SCH
#| A | (10*2) | JJ_2 | ?
#| B | (8*2) | JJ_1 | ?
#| C | (5*2) | JJ_3 | ?
#| D | (8*2) | JJ_4 | ?
#SINGLE: default 0, if the pin header is single on FRDM-Board, this should be set 1
SINGLE: 0
I2C_SLAVE_FOR_CASE:
IP: __I2C_SLAVE_FOR_CASE
CMD: ICS
scl_slv:
PIN: CPLD_IO100
DIRECTION: IN
sda:
PIN: CPLD_IO98
DIRECTION: INOUT
|
<gh_stars>1-10
#
# List of IPs and relative branch/commit-hash/tag.
# Uses the YAML syntax.
#
# Examples:
#
# or10n:
# commit: tags/PULP3_final
# domain: [cluster]
# udma:
# commit: 62b10440
# domain: [soc]
# axi_slice:
# commit: master
# domain: [soc,cluster]
# If a *tag* or *commit* is referenced, the IP will be in a
# state of DETACHED HEAD. Before committing any additional
# work, make sure to checkout a branch.
#
# SoC
apb/apb_uart:
commit: 3bc0006c070b0b9c18719586cab2845cd6e8b63c
domain: [soc]
group: pulp-platform
axi/axi_id_remap:
commit: v0.1.0
domain: [soc]
group: pulp-platform
axi/axi_mem_if:
commit: d1ca159ab5f4fa63399c03b6f18e42ae65581f34
domain: [soc]
group: pulp-platform
axi/axi_rab:
commit: v0.1.0
domain: [soc]
group: pulp-platform
axi/axi2apb:
commit: b4c915fcd9526ab6e7378a3e43555189c898b89a
domain: [soc]
group: pulp-platform
# cluster + SoC
axi/axi_node:
commit: 2f1e322c07b483990f31b22e20b8db635b1bec97
group: pulp-platform
axi/axi_slice:
commit: f8886bd3f2d4967aaccff15b67bf1f9e1a0e3453
group: pulp-platform
axi/axi_slice_dc:
commit: a755b1d84a605a45d6e63d71996370bbf473542f
group: pulp-platform
common_cells:
commit: 5b7021208a3ff818a617b497c5b5564f412fe0a8
group: pulp-platform
fpga-support:
commit: v0.3.2
group: pulp-platform
pkg/cfmath:
commit: v0.1.0
group: pulp-platform
# cluster
riscv:
commit: <PASSWORD>
domain: [cluster]
group: pulp-platform
pulp_cluster:
commit: <PASSWORD>
domain: [cluster]
group: pulp-platform
|
<reponame>slaclab/epix-hr-10k-2m
ePixHr10kT:
enable: True
ForceWrite: False
EpixHR:
enable: True
PowerSupply:
enable: True
DigitalEn: True
AnalogEn: True
|
<reponame>CSEYJ/Flightplan
all: # All experiments must contain these fields
- description
- experiment
- repositories:
- P4Boosters
- files:
- documentation.md
memcached: # Memcached must contain these in addition
- files:
- data
- bitstream.tar.gz
e2e:
- repositories:
- TofinoP4Boosters
- files:
- data
- Encoder.tar.gz
- Decoder.tar.gz
- Memcached.tar.gz
fec:
- files:
- data
- Encoder.tar.gz
- Decoder.tar.gz
- repositories:
- TofinoP4Boosters
overhead: []
offload: []
|
name: Regression Tests
on: [push, pull_request]
jobs:
build:
name: Python ${{ matrix.python-version }} (${{ matrix.group }}/10)
runs-on: ubuntu-20.04
strategy:
matrix:
python-version: [3.9]
group: [1, 2, 3, 4, 5]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install Verilator
run: |
sudo apt install -y --no-install-recommends make g++ perl python3 autoconf flex bison libfl2 libfl-dev zlibc zlib1g zlib1g-dev
git clone https://github.com/verilator/verilator.git
cd verilator
git checkout v4.106
autoconf
./configure
make -j $(nproc)
sudo make install
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install tox tox-gh-actions
- name: Test with tox
env:
SIM: verilator
CXX: 'g++'
CC: 'gcc'
PYTHON_VERSION: 3.9
FULL_REGRESSION: 1
TOX_TESTENV_PASSENV: GITHUB_ACTIONS
run: tox
|
name: usb_phy
clock_port: clk
verilog:
- timescale.v
- usb_phy.v
- usb_rx_phy.v
- usb_tx_phy.v
|
---
name: integration
# Build and test starting from latest bootstrap release.
on:
pull_request: {branches: master}
push:
concurrency: ci
jobs:
AMD64_DARWIN:
uses: modula3/cm3/.github/workflows/amd64_darwin.yml@master
AMD64_LINUX:
uses: modula3/cm3/.github/workflows/amd64_linux.yml@master
AMD64_MINGW:
uses: modula3/cm3/.github/workflows/amd64_mingw.yml@master
AMD64_NT:
uses: modula3/cm3/.github/workflows/amd64_nt.yml@master
I386_LINUX:
uses: modula3/cm3/.github/workflows/i386_linux.yml@master
I386_NT:
uses: modula3/cm3/.github/workflows/i386_nt.yml@master
|
<reponame>Hog-CERN/Hog
# Copyright 2018-2021 The University of Birmingham
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
include:
- local: YAML/hog-common.yml
- local: YAML/hog-main.yml
generate-config:
# only:
# refs:
# - merge_requests
stage: dynamic_generate
image: gitlab-registry.cern.ch/hog/hog-docker:tclsh
script:
- if [[ ${HOG_CHECK_PROJVER} != 1 ]]; then
echo "Project version checker is disabled.";
tclsh ./Hog/Tcl/utils/generate_yaml.tcl -runall;
else
tclsh ./Hog/Tcl/utils/generate_yaml.tcl;
fi;
- echo "CREATE_JOB_ID=${CI_JOB_ID}" > .env
artifacts:
paths:
- generated-config.yml
- .env
rules:
- if: $CI_MERGE_REQUEST_ID
tags:
- docker
child-pipeline:
# only:
# refs:
# - merge_requests
stage: dynamic_triggers
trigger:
include:
- artifact: generated-config.yml
job: generate-config
strategy: depend
rules:
- if: $CI_MERGE_REQUEST_ID
collect_artifacts:
image: gitlab-registry.cern.ch/hog/hog-docker:tclsh
stage: collect
script:
- git status
- git submodule init
- git submodule update Hog
- if [ ! -f .env ]; then
echo "You must save a .env file as an upstream artifact containing the Child Pipeline Creator's Job ID as CREATE_JOB_ID";
exit 1;
else
source .env;
fi;
- tclsh Hog/Tcl/CI/download_child_artifacts.tcl $HOG_PUSH_TOKEN $CI_API_V4_URL $CI_PROJECT_ID $CI_COMMIT_SHA $CREATE_JOB_ID
- if [ $? -ne 0 ]; then
exit 1;
fi
coverage: '/Statements\s+:\s(\d+.?\d+)%/'
rules:
- if: $CI_MERGE_REQUEST_ID
artifacts:
paths:
- bin/
expire_in: 30 days
tags:
- docker
|
kind: NetworkPolicy
apiVersion: networking.k8s.io/v1
metadata:
name: allow-dns
namespace: default
spec:
podSelector: {}
policyTypes:
- Egress
egress:
- to:
ports:
- protocol: UDP
port: 53
- protocol: TCP
port: 53
|
name: Test Github Action
on: [push, pull_request]
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Set up Git repository
uses: actions/checkout@v2
- name: Run VUnit tests
uses: VUnit/[email protected]
with:
run_file: run.py
|
---
input_file : ../akane/11_merge_sort_core_2.akd
output_file : 11_merge_sort_core_2.md
image_url :
"Fig.1 マージソートコアの構成" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/1afcda19-1ee7-2c05-96b4-d682c1876da9.jpeg"
"Fig.2 4-way マージソートツリーによる16ワードデータのソート例" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/ff1f852d-d9a8-5028-2054-36508673a104.jpeg"
"Fig.3 最初のパスのDMA転送(ストリーム入力無し)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/a032bab6-60be-dcec-7f16-42c77ee3243e.jpeg"
"Fig.4 最初のパスのDMA転送(ストリーム入力あり)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/de34f24a-8bd4-0374-0755-7a7ba068ead4.jpeg"
"Fig.5 マージソートコアのストリーム入力" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/b4d87fc1-8b82-4a0e-6211-7d7cc6de233f.jpeg"
"Fig.6 マルチワードマージソートの最初のパス(ストリーム入力無しの場合)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/1fe5c4ae-b438-2c56-3f99-cb196725ab5f.jpeg"
"Fig.7 マルチワードマージソートの最初のパス(ストリーム入力ありの場合)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/c4e8d32c-d926-6b03-75b7-964d9c57dcda.jpeg"
link_list :
- id : "「はじめに」"
title: "「VHDL で書くマージソーター(はじめに)」@Qiita"
url : "https://qiita.com/ikwzm/items/6665b2ef44d878a5b85f"
- id : "「ワードの定義」"
title: "「VHDL で書くマージソーター(ワードの定義)」@Qiita"
url : "https://qiita.com/ikwzm/items/bdcd8876317b908ff492"
- id : "「ワード比較器」"
title: "「VHDL で書くマージソーター(ワード比較器)」@Qiita"
url : "https://qiita.com/ikwzm/items/d5d1dd264b1670f33bd7"
- id : "「ソーティングネットワーク」"
title: "「VHDL で書くマージソーター(ソーティングネットワーク)」@Qiita"
url : "https://qiita.com/ikwzm/items/a1d06e47523759c726ae"
- id : "「バイトニックマージソート」"
title: "「VHDL で書くマージソーター(バイトニックマージソート)」@Qiita"
url : "https://qiita.com/ikwzm/items/366eacbf6a877994c955"
- id : "「バッチャー奇偶マージソート」"
title: "「VHDL で書くマージソーター(バッチャー奇偶マージソート)」@Qiita"
url : "https://qiita.com/ikwzm/items/c21a53f21b87408a7805"
- id : "「シングルワード マージソート ノード」"
title: "「VHDL で書くマージソーター(シングルワード マージソート ノード)」@Qiita"
url : "https://qiita.com/ikwzm/items/7fd7ef9ffc4d9b314fee"
- id : "「マルチワード マージソート ノード」"
title: "「VHDL で書くマージソーター(マルチワード マージソート ノード)」@Qiita"
url : "https://qiita.com/ikwzm/items/ed96b7a44b83bcee4ba5"
- id : "「マージソート ツリー」"
title: "「VHDL で書くマージソーター(マージソート ツリー)」@Qiita"
url : "https://qiita.com/ikwzm/items/1f76ae5cda95aaf92501"
- id : "「端数ワード処理」"
title: "「VHDL で書くマージソーター(端数ワード処理)」@Qiita"
url : "https://qiita.com/ikwzm/items/6b15340f1e05ef03f8d0"
- id : "「ストリーム入力」"
title: "「VHDL で書くマージソーター(ストリーム入力)」@Qiita"
url : "https://qiita.com/ikwzm/items/56e22511021a082a2ccd"
- id : "「ストリームフィードバック」"
title: "「VHDL で書くマージソーター(ストリームフィードバック)」@Qiita"
url : "https://qiita.com/ikwzm/items/e8c59c0ec92956c9355f"
- id : "「ArgSort IP」"
title: "「VHDL で書くマージソーター(ArgSort IP)」@Qiita"
url : "https://qiita.com/ikwzm/items/89fc9542492fca74c9e3"
- id : "「ArgSort-Ultra96」"
title: "「VHDL で書くマージソーター(ArgSort-Ultra96)」@Qiita"
url : "https://qiita.com/ikwzm/items/d58c9b77d038e23ac792"
- id : "「ArgSort-Kv260」"
title: "「VHDL で書くマージソーター(ArgSort-Kv260)」@Qiita"
url : "https://qiita.com/ikwzm/items/ec0f779534c44b35334a"
- id : "ACRi"
title: "アダプティブコンピューティング研究推進体(ACRi)"
url : "https://www.acri.c.titech.ac.jp/wp"
- id : "アダプティブコンピューティング研究推進体(ACRi)"
title: "アダプティブコンピューティング研究推進体(ACRi)"
url : "https://www.acri.c.titech.ac.jp/wp"
- id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(1)」"
title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(1)」"
url : "https://www.acri.c.titech.ac.jp/wordpress/archives/132"
- id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(2)」"
title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(2)」"
url : "https://www.acri.c.titech.ac.jp/wordpress/archives/501"
- id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(3)」"
title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(3)」"
url : "https://www.acri.c.titech.ac.jp/wordpress/archives/2393"
- id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(4)」"
title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(4)」"
url : "https://www.acri.c.titech.ac.jp/wordpress/archives/3888"
- id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(5)」"
title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(5)」"
url : "https://www.acri.c.titech.ac.jp/wordpress/archives/4713"
seg_level : -1
---
|
<reponame>rits-drsl/ZybotR2-96-fpt19<gh_stars>10-100
%YAML 1.2
---
param:
opencv_tm_method: 5 # CV_TM_SQDIFF = 0, CV_TM_SQDIFF_NORMED = 1, CV_TM_CCORR = 2, CV_TM_CCORR_NORMED = 3, CV_TM_CCOEFF = 4, CV_TM_CCOEFF_NORMED = 5
search_range_degree: 10 # 探索する角度の範囲(+-)
nof_divisions: 20 # 角度の分割数(角度分解能 : 2 * search_range_degree / nof_divisions)
max_val_thr: 0.30 # 有効なマッチング結果とするしきい値
|
language: python
dist: xenial
matrix:
include:
- python: 3.5
env: TOX_ENV=py35
- python: 3.6
env: TOX_ENV=py36
- python: 3.7
env: TOX_ENV=py37
install:
# Install riscv-dv using pip to ensure dependencies are downloaded correctly.
- pip install -r requirements.txt
- pip install .
script:
- sphinx-build -E -W -b linkcheck docs/source build
- pip uninstall -y riscv-dv
|
################################################################################
#
# \file .travis.yml
#
# \brief Configuration file for Travis continuous integration
#
# Copyright (c) 2017, B&R Industrial Automation GmbH
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holders nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
language: c
compiler:
- gcc
- clang
env:
global:
- GIT_COMMIT=$TRAVIS_COMMIT
- VERA_ROOT=/home/travis/build/OpenAutomationTechnologies/openPOWERLINK_V2/tools/checkstyle/.vera++/
addons:
apt:
packages:
- libpcap-dev
- cmake-data
- cmake
- qtbase5-dev
- libtcl8.5
- vera++
before_install:
# Execute commit message guidelines check
- chmod +x tools/checkstyle/checkcommit.sh
- ./tools/checkstyle/checkcommit.sh
before_script:
# Print build info that binary is compiled with
- echo $TRAVIS_COMMIT
- echo $TRAVIS_COMMIT_MESSAGE
- echo $TRAVIS_COMMIT_RANGE
- echo $TRAVIS_TAG
- echo $TRAVIS_BRANCH
- echo $TRAVIS_BUILD_NUMBER
- echo $TRAVIS_REPO_SLUG
# Run vera++ coding guidelines check
- cp -r /usr/lib/vera++ tools/checkstyle/.vera++
- cp /usr/bin/vera++ tools/checkstyle/.vera++/vera++
- chmod +x tools/checkstyle/checkoplkstyle.sh
- ./tools/checkstyle/checkoplkstyle.sh
script:
- cd drivers/linux/drv_kernelmod_edrv/build/
- cmake ..
- cd ../../../../stack/build/linux
- cmake ../..
- make install
- cd ../../../apps/demo_mn_console/build/linux
- cmake ../..
- make install
- cd ../../../demo_mn_qt/build/linux
- cmake ../..
- make install
- cd ../../../demo_cn_console/build/linux
- cmake ../..
- make install
|
<gh_stars>10-100
language: c
compiler: clang
matrix:
include:
# works on Precise and Trusty
- os: linux
addons:
addons:
apt:
sources:
- george-edison55-precise-backports
packages:
- cmake-data
- cmake
- os: osx
osx_image: xcode8
#before_script:
before_install:
- eval "${MATRIX_EVAL}"
- cmake .
script:
- cmake --build .
deploy:
- provider: releases
api_key:
secure: "<KEY>
file_glob: true
file: lib/*
skip_cleanup: true
on:
tags: true
repo: iotaledger/ccurl
branch: master
|
description: Featherweight IP RISC-V processor
compatible: "fwrisc"
include: [base.yaml]
properties:
"#address-cells":
const: 1
"#size-cells":
const: 0
|
<reponame>sil2100/Vitis-AI
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: resnetv1_152 classifier on ImageNet.
input size: 224*224
float ops: 21.83G
task: classification
framework: tensorflow
prune: 'no'
version: 2.0
files:
- name: tf_resnetv1_152_imagenet_224_224_21.83G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=tf_resnetv1_152_imagenet_224_224_21.83G_2.0.zip
checksum: a7656ccff946dab97bf75ae1db54189f
- name: resnet_v1_152_tf
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=resnet_v1_152_tf-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: f1e2c94e2732d86c5c56c6f8fd7cf30b
- name: resnet_v1_152_tf
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=resnet_v1_152_tf-vck190-r2.0.0.tar.gz
checksum: e3caa4063f81a17f5a17a9cd00a72b3f
- name: resnet_v1_152_tf
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=resnet_v1_152_tf-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: 0daf353767ece6407c4ca8c8578568d0
- name: resnet_v1_152_tf
type: xmodel
board: vck50008pe-DPUCVDX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=resnet_v1_152_tf-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz
checksum: 702de022087fa24db273a46fe1b72b2e
- name: resnet_v1_152_tf
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=resnet_v1_152_tf-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: da448432adaa8cafd3f76d42808b2bbb
- name: resnet_v1_152_tf
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=resnet_v1_152_tf-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: 1b19ba5f6d95036b69b8e52605d64ce6
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
#template_tql < $RDI_TEMPLATES/sdx/sdaccel/swhw/template.tql
description: testinfo generated using import_sdx_test.py script
level: 6
owner: vallina
user:
allowed_test_modes: [sw_emu, hw_emu, hw]
force_makefile: "--force"
host_args: {all: sincos.xclbin}
host_cflags: ' -DDSA64 -DFLOW_HLS_CSIM'
host_exe: host.exe
host_src: test-cl.cpp
kernels:
- {cflags: {all: ' -I.'}, file: test_sincos.xo, ksrc: sincos.cl, name: test_sincos, type: C}
name: 001_basic_sincos
xclbins:
- files: 'test_sincos.xo '
kernels:
- cus: [test_sincos]
name: test_sincos
num_cus: 1
name: sincos.xclbin
|
<gh_stars>10-100
!!omap
- ATIMER_DOWNCOUNTER:
fields: !!omap
- CVAL:
access: rw
description: When equal to zero an interrupt is raised
lsb: 0
reset_value: '0'
width: 16
- ATIMER_PRESET:
fields: !!omap
- PRESETVAL:
access: rw
description: Value loaded in DOWNCOUNTER when DOWNCOUNTER equals zero
lsb: 0
reset_value: '0'
width: 16
- ATIMER_CLR_EN:
fields: !!omap
- CLR_EN:
access: w
description: Writing a 1 to this bit clears the interrupt enable bit in the
ENABLE register
lsb: 0
reset_value: '0'
width: 1
- ATIMER_SET_EN:
fields: !!omap
- SET_EN:
access: w
description: Writing a 1 to this bit sets the interrupt enable bit in the
ENABLE register
lsb: 0
reset_value: '0'
width: 1
- ATIMER_STATUS:
fields: !!omap
- STAT:
access: r
description: A 1 in this bit shows that the STATUS interrupt has been raised
lsb: 0
reset_value: '0'
width: 1
- ATIMER_ENABLE:
fields: !!omap
- ENA:
access: r
description: A 1 in this bit shows that the STATUS interrupt has been enabled
and that the STATUS interrupt request signal is asserted when STAT = 1 in
the STATUS register
lsb: 0
reset_value: '0'
width: 1
- ATIMER_CLR_STAT:
fields: !!omap
- CSTAT:
access: w
description: Writing a 1 to this bit clears the STATUS interrupt bit in the
STATUS register
lsb: 0
reset_value: '0'
width: 1
- ATIMER_SET_STAT:
fields: !!omap
- SSTAT:
access: w
description: Writing a 1 to this bit sets the STATUS interrupt bit in the
STATUS register
lsb: 0
reset_value: '0'
width: 1
|
# Adapted from Garnet and ButterPHY
name: analog_core
commands:
- |
mkdir -p outputs
tar -xvf /home/sjkim85/dragonphy_tarballs/analog_core-latest.tar.gz -C outputs
mv outputs/analog_core-0.9.0/* outputs/
python alt.py
mv outputs/analog_core_alt.lef outputs/analog_core.lef
outputs:
- analog_core.lef
- analog_core.gds
- analog_core.spi
- analog_core.version
|
power.inputs.waveforms_meta: "append"
power.inputs.waveforms:
- "/tools/B/daniels/hammer-tsmc28/build/sim-par-rundir/vcdplus.vpd"
power.inputs.database: "/tools/B/daniels/hammer-tsmc28/build/par-rundir/latest"
power.inputs.saifs_meta: "append"
power.inputs.saifs:
- "/tools/B/daniels/hammer-tsmc28/build/sim-par-rundir/hdc_sensor_fusion.saif"
|
GitBase: ..
TopRoguePackage: KpixDaq
RoguePackages:
- common/python
RogueScripts:
- ../software/scripts/KpixGui
- ../software/scripts/KpixRun
- ../software/scripts/KpixCalibration
- ../software/scripts/KpixPromLoader
- ../software/scripts/KpixServer
- ../software/scripts/KpixClientGui
- ../software/scripts/KpixClientRun
- ../software/scripts/KpixFileReader
- ../software/scripts/KpixMultiRun
CondaDependencies:
- surf=v2.25.0
- matplotlib
- rogue=v5.10.0
RogueConfig:
Targets:
DesyTracker:
ImageDir: targets/DesyTracker/images
Extensions:
- mcs
Releases:
DesyTracker:
Targets:
- DesyTracker
Types:
- Rogue
|
<reponame>wallento/ibex
on: [push, pull_request]
jobs:
lint:
runs-on: ubuntu-latest
name: Linter
strategy:
matrix:
config: ['small']
fail-fast: false
steps:
- name: Checkout
uses: actions/checkout@v1
- name: Test and display fusesoc config for ${{ matrix.config }}
id: config_opts
run: echo "::set-output name=options::$(./util/ibex_config.py ${{ matrix.config }} fusesoc_opts)"
- name: Lint Verilog source files with Verilator for ${{ matrix.config }}
uses: librecores/ci-fusesoc-action@master
with:
command: 'run'
core: 'lowrisc:ibex:ibex_core_tracing'
target: 'lint'
tool: 'verilator'
core-arguments: ${{ steps.config_opts.outputs.options }}
|
name: Artifacts
on:
push:
branches: ['master']
jobs:
artifacts:
name: "Create artifacts"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: julia-actions/setup-julia@latest
with:
version: '1'
- uses: technote-space/get-diff-action@v5
with:
PATTERNS: |
verilog-stdlib/**/*.v
verilog-stdlib/**/*.sv
- name: Install dependencies
run: |
julia --color=yes --project=. -e 'using Pkg; Pkg.instantiate()'
- name: Build artifact
if: env.GIT_DIFF
run: |
julia --color=yes --project=. -e 'include("build_artifacts.jl")'
git config user.name github-actions
git config user.email <EMAIL>
git add verilog-stdlib.tar.gz Artifacts.toml
git commit -m "Updated artifacts"
git push
|
package:
name: ariane
authors: [ "<NAME> <<EMAIL>>" ]
dependencies:
axi2per: { git: "<EMAIL>:pulp-open/axi2per.git", version: 0.1.0 }
axi_mem_if: { git: "<EMAIL>:pulp-open/axi_mem_if.git", version: 0.1.0 }
axi_node: { git: "<EMAIL>:pulp-open/axi_node.git", version: 1.0.1 }
axi_slice: { git: "<EMAIL>:pulp-open/axi_slice.git", version: 1.1.0 }
axi: { git: "<EMAIL>:fschuiki/axi.git", rev: master }
# bender-vsim: { git: "<EMAIL>:floce/bender-vsim.git", rev: master }
|
name: language-dataset-ci
on:
push:
branches:
- master
pull_request: {}
jobs:
lint-black:
name: lint (black)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
with:
fetch-depth: 1
- uses: actions/setup-python@v1
with:
python-version: '3.7.x'
architecture: 'x64'
- run: pip install black
- run: black --check tools/
lint-data:
name: lint (data)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
with:
fetch-depth: 1
- uses: actions/setup-python@v1
with:
python-version: '3.7.x'
architecture: 'x64'
- run: pip install pipenv
- run: sudo apt install libyaml-dev
- run: pipenv install
- run: pipenv run python -m tools.lint
check-commit:
name: check no pending changes
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
with:
fetch-depth: 1
- uses: actions/setup-python@v1
with:
python-version: '3.7.x'
architecture: 'x64'
- run: pip install pipenv
- run: sudo apt install libyaml-dev
- run: pipenv install
- run: pipenv run python -m tools.prepare_commit
- run: git diff --exit-code
test:
name: test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
with:
fetch-depth: 1
- uses: actions/setup-python@v1
with:
python-version: '3.7.x'
architecture: 'x64'
- uses: actions/setup-ruby@v1
with:
ruby-version: '2.4.x'
- run: sudo apt install libyaml-dev cmake pkg-config libicu-dev zlib1g-dev libcurl4-openssl-dev libssl-dev ruby-dev
- run: gem install --no-rdoc --no-ri github-linguist
- run: pip install pipenv
- run: pipenv install --dev
- run: pipenv run pytest tools
|
<filename>blocks/eventpacket/test/test_osd_event_packetization_fixedwidth.manifest.yaml
module: test_osd_event_packetization_fixedwidth
sources:
- ../../../interfaces/common/dii_channel.sv
- ../common/osd_event_packetization.sv
- ../common/osd_event_packetization_fixedwidth.sv
toplevel: osd_event_packetization_fixedwidth
simulators:
- vcs
parameters:
MAX_PKT_LEN: 12
# two packets: packet 1 with 144 bit of payload, and packet 2 with 12 bit of
# payload
DATA_WIDTH: 160
|
description: >-
This is an example of a submission to the
archive which was submitted with a
--label "my_label" tag during the submission
process
experiment: memcached
repositories:
P4Boosters: 7f4905bf
files:
documentation.md: ../memcached/Memcached.md
data: ../memcached/output/
analysis: ../memcached/analysis/
bitstream.tar.gz: /home/iped/1104bit.tar.gz
|
jtag_pulp:
incdirs: [
../../rtl/includes,
]
files: [
src/bscell.sv,
src/jtag_axi_wrap.sv,
src/jtag_enable.sv,
src/jtag_enable_synch.sv,
src/jtagreg.sv,
src/jtag_rst_synch.sv,
src/jtag_sync.sv,
src/tap_top.v,
]
jg_slint_top_name: [
tap_top
]
jg_slint_elab_opt: [
]
jg_slint_postelab_cmds: [
]
jg_slint_clocks: [
tck_i,
]
jg_slint_resets: [
~rst_ni,
]
|
<reponame>mballance/vte<filename>azure-pipelines.yml
variables:
build_num: $(Build.BuildNumber)
jobs:
- job: PyPi_Linux
pool:
vmImage: 'ubuntu-18.04'
steps:
- bash: echo "##vso[task.prependpath]$CONDA/bin"
displayName: Add conda to PATH
- script: |
python -m pip install wheel
python -m pip install twine
displayName: 'Install wheel+twine'
- script: |
python setup.py bdist_wheel --universal
displayName: 'Build Wheel'
- task: TwineAuthenticate@1
condition: eq(variables['Build.SourceBranchName'], 'master')
inputs:
pythonUploadServiceConnection: pypi-vte
- script: |
# Only deploy from master
if test "$(Build.SourceBranchName)" = "master"; then
python -m twine --version
echo "Calling twine"
python -m twine upload -r vte --config-file $(PYPIRC_PATH) dist/*.whl
echo "Calling twine complete"
fi
displayName: 'Upload to PyPi'
|
derived_clks:
tb_emu_io:
abspath: 'tb_i'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
emu_dt: 'emu_dt'
dt_req: 'dt_req'
iacore_clk_adc:
abspath: 'tb_i.top_i.iacore'
gated_clk_req: 'clk_adc_val'
gated_clk: 'clk_adc_i'
iacore_clk_indiv:
abspath: 'tb_i.top_i.iacore.iindiv'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_pi0:
abspath: 'tb_i.top_i.iacore.iPI[0].iPI'
emu_dt: 'emu_dt'
dt_req: 'dt_req'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_pi1:
abspath: 'tb_i.top_i.iacore.iPI[1].iPI'
emu_dt: 'emu_dt'
dt_req: 'dt_req'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_pi2:
abspath: 'tb_i.top_i.iacore.iPI[2].iPI'
emu_dt: 'emu_dt'
dt_req: 'dt_req'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_pi3:
abspath: 'tb_i.top_i.iacore.iPI[3].iPI'
emu_dt: 'emu_dt'
dt_req: 'dt_req'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adc0:
abspath: 'tb_i.top_i.iacore.iADC[0].iADC'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adc1:
abspath: 'tb_i.top_i.iacore.iADC[1].iADC'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adc2:
abspath: 'tb_i.top_i.iacore.iADC[2].iADC'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adc3:
abspath: 'tb_i.top_i.iacore.iADC[3].iADC'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adc4:
abspath: 'tb_i.top_i.iacore.iADC[4].iADC'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adc5:
abspath: 'tb_i.top_i.iacore.iADC[5].iADC'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adc6:
abspath: 'tb_i.top_i.iacore.iADC[6].iADC'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adc7:
abspath: 'tb_i.top_i.iacore.iADC[7].iADC'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adc8:
abspath: 'tb_i.top_i.iacore.iADC[8].iADC'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adc9:
abspath: 'tb_i.top_i.iacore.iADC[9].iADC'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adc10:
abspath: 'tb_i.top_i.iacore.iADC[10].iADC'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adc11:
abspath: 'tb_i.top_i.iacore.iADC[11].iADC'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adc12:
abspath: 'tb_i.top_i.iacore.iADC[12].iADC'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adc13:
abspath: 'tb_i.top_i.iacore.iADC[13].iADC'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adc14:
abspath: 'tb_i.top_i.iacore.iADC[14].iADC'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adc15:
abspath: 'tb_i.top_i.iacore.iADC[15].iADC'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adcrep0:
abspath: 'tb_i.top_i.iacore.iADCrep0'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
iacore_adcrep1:
abspath: 'tb_i.top_i.iacore.iADCrep1'
emu_clk: 'emu_clk'
emu_rst: 'emu_rst'
|
name: Build Docker test image
on:
push:
branches:
- master
paths:
- docker/test/Dockerfile
- docker/lint/Dockerfile
- docker/tcl/Dockerfile
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build docker test image 📦
shell: bash
run: |
cd docker
cd test && docker build . -t keyiz/hgdb:test && cd ../
cd lint && docker build . -t keyiz/hgdb:lint && cd ../
cd tcl && docker build . -t keyiz/hgdb:tcl && cd ../
docker login -u $DOCKER_USERNAME -p $DOCKER_PASSWORD
docker push keyiz/hgdb:test
docker push keyiz/hgdb:lint
env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
<reponame>bluetiger9/Vitis-AI<gh_stars>100-1000
name: vitis-ai-optimizer_pytorch
channels:
- pytorch
- defaults
dependencies:
- python=3.6
- vai_optimizer_pytorch_gpu
|
name: glb_tile_syn
commands:
- bash get_glb_tile_syn_outputs.sh
outputs:
- glb_tile.syn.v
- glb_tile.syn.sdc
- glb_tile.syn.sdf
- glb_tile.syn.spef
- glb_tile.syn.namemap
|
ePixHr10kT:
enable: True
ForceWrite: False
InitAfterConfig: False
EpixHR:
enable: True
RegisterControl:
enable: True
GlblRstPolarity: True
ClkSyncEn: False
SyncPolarity: False
SyncDelay: 0
SyncWidth: 0
SR0Polarity: False
SR0Delay1: 10
SR0Width1: 16000
ePixAdcSHPeriod: 1
ePixAdcSHOffset: 0
AcqPolarity: False
AcqDelay1: 2400
AcqWidth1: 2400
AcqDelay2: 0
AcqWidth2: 0
R0Polarity: False
R0Delay: 100
R0Width: 24800
PPbePolarity: False
PPbeDelay: 0
PPbeWidth: 0
PpmatPolarity: False
PpmatDelay: 0
PpmatWidth: 0
SaciSyncPolarity: False
SaciSyncDelay: 0
SaciSyncWidth: 0
ResetCounters: False
AsicPwrEnable: False
AsicPwrManual: False
AsicPwrManualDig: False
AsicPwrManualAna: False
AsicPwrManualIo: False
AsicPwrManualFpga: False
DebugSel_TG: AsicR0
DebugSel_MPS: AsicR0
StartupReq: True
|
name: fpu
clock_port: clk
verilog:
- except.v
- fpu.v
- post_norm.v
- pre_norm.v
- pre_norm_fmul.v
- primitives.v
|
<filename>Sources/ospboard/opt/osp/src/kernel/Documentation/devicetree/bindings/rtc/rtc.yaml<gh_stars>0
# SPDX-License-Identifier: GPL-2.0
%YAML 1.2
---
$id: http://devicetree.org/schemas/rtc/rtc.yaml#
$schema: http://devicetree.org/meta-schemas/core.yaml#
title: RTC Generic Binding
maintainers:
- <NAME> <<EMAIL>>
description: |
This document describes generic bindings which can be used to
describe Real Time Clock devices in a device tree.
properties:
$nodename:
pattern: "^rtc(@.*|-[0-9a-f])*$"
aux-voltage-chargeable:
$ref: /schemas/types.yaml#/definitions/uint32
enum: [0, 1]
description: |
Tells whether the battery/supercap of the RTC (if any) is
chargeable or not:
0: not chargeable
1: chargeable
quartz-load-femtofarads:
$ref: /schemas/types.yaml#/definitions/uint32
description:
The capacitive load of the quartz(x-tal), expressed in femto
Farad (fF). The default value shall be listed (if optional),
and likewise all valid values.
start-year:
$ref: /schemas/types.yaml#/definitions/uint32
description:
If provided, the default hardware range supported by the RTC is
shifted so the first usable year is the specified one.
trickle-diode-disable:
$ref: /schemas/types.yaml#/definitions/flag
description:
Do not use internal trickle charger diode. Should be given if
internal trickle charger diode should be disabled.
deprecated: true
trickle-resistor-ohms:
$ref: /schemas/types.yaml#/definitions/uint32
description:
Selected resistor for trickle charger. Should be given
if trickle charger should be enabled.
trickle-voltage-millivolt:
description:
Selected voltage for trickle charger. Should be given
if trickle charger should be enabled and the trickle voltage is different
from the RTC main power supply.
wakeup-source:
$ref: /schemas/types.yaml#/definitions/flag
description:
Enables wake up of host system on alarm.
additionalProperties: true
...
|
<filename>ips_list.yml
#
# List of IPs and relative branch/commit-hash/tag.
# Uses the YAML syntax.
#
# Examples:
#
# or10n:
# commit: tags/PULP3_final
# domain: [cluster]
# udma:
# commit: 62b10440
# domain: [soc]
# axi_slice:
# commit: tags/pulpissimo-v1.0
# domain: [soc,cluster]
# If a *tag* or *commit* is referenced, the IP will be in a
# state of DETACHED HEAD. Before committing any additional
# work, make sure to checkout a branch.
#
common_cells:
commit: v1.21.0
domain: [cluster, soc]
L2_tcdm_hybrid_interco:
commit: v1.0.0
domain: [soc]
cluster_interconnect:
commit: v1.1.1
domain: [cluster, soc]
adv_dbg_if:
commit: v0.0.2
domain: [cluster, soc]
apb/apb:
commit: v0.1.0
domain: [soc]
apb/apb2per:
commit: v0.1.0
domain: [soc]
apb/apb_adv_timer:
commit: v1.0.4
domain: [soc]
apb/apb_fll_if:
commit: v0.1.3
domain: [soc]
apb/apb_gpio:
commit: 0e9f142f2f11278445c953ad011fce1c7ed85b66
domain: [soc]
apb/apb_node:
commit: v0.1.1
domain: [soc]
apb_interrupt_cntrl:
commit: v0.1.1
domain: [soc]
axi/axi:
commit: v0.29.1
domain: [cluster, soc]
axi/axi_slice:
commit: v1.1.4
domain: [cluster, soc]
timer_unit:
commit: v1.0.2
domain: [cluster, soc]
fpnew:
commit: v0.6.6
domain: [cluster, soc]
jtag_pulp:
commit: v0.1
domain: [soc]
cv32e40p:
commit: pulpissimo-v3.4.0-rev3
domain: [cluster, soc]
ibex:
commit: pulpissimo-v6.1.1
group: lowRISC
domain: [cluster, soc]
scm:
commit: v1.0.1
domain: [cluster, soc]
generic_FLL:
commit: 1c92dc73a940392182fd4cb7b86f35649b349595
domain: [soc]
tech_cells_generic:
commit: v0.2.3
domain: [cluster, soc]
udma/udma_core:
commit: v1.1.0
domain: [soc]
udma/udma_uart:
commit: v1.0.1
domain: [soc]
udma/udma_i2c:
commit: v1.0.0
domain: [soc]
udma/udma_i2s:
commit: v1.1.2
domain: [soc]
udma/udma_qspi:
commit: v1.0.4
domain: [soc]
udma/udma_sdio:
commit: v1.1.2
domain: [soc]
udma/udma_camera:
commit: v1.1.2
domain: [soc]
udma/udma_filter:
commit: v1.0.2
domain: [soc]
udma/udma_external_per:
commit: v1.0.3
domain: [soc]
udma_hyper:
commit: 83ab704f9d1c5f9e5353268c901fe95c36bcea36
domain: [soc]
hwpe-mac-engine:
commit: v1.3.3
domain: [cluster, soc]
riscv-dbg:
commit: v0.4.1
domain: [soc]
register_interface:
commit: v0.3.1
domain: [soc]
|
# Private CI trigger. Used to run tooling that can't currently be shared
# publicly.
# The runner used for private CI enforces the use of the template below. All
# build steps need to be placed into the template.
resources:
repositories:
- repository: opentitan-private-ci
type: github
endpoint: lowRISC
name: lowrisc/opentitan-private-ci
extends:
template: jobs.yml@opentitan-private-ci
|
package:
name: pulp
authors:
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
dependencies:
pulp_soc: { git: "<EMAIL>:micprog/pulp_soc.git", rev: "5847b1d45bceb9cf6275c3468e9a0180c94af3bd" } # To be updated with pulp-platform dependency
pulp_cluster: { git: "<EMAIL>:micprog/pulp_cluster.git", rev: "b07b7249293483fc3a0ae6aec31431a81fee0577" } # To be updated with pulp-platform dependency
tbtools: { git: "<EMAIL>:pulp-platform/tbtools.git", version: 0.2.1 }
export_include_dirs:
- rtl/includes
# workspace:
# checkout_dir: "./ips"
sources:
# Source files grouped in levels. Files in level 0 have no dependencies on files in this
# package. Files in level 1 only depend on files in level 0, files in level 2 on files in
# levels 1 and 0, etc. Files within a level are ordered alphabetically.
# Level 0
- rtl/pulp/jtag_tap_top.sv
- rtl/pulp/pad_control.sv
- rtl/pulp/pad_frame.sv
- rtl/pulp/cluster_domain.sv
- rtl/pulp/soc_domain.sv
- rtl/pulp/rtc_date.sv
- rtl/pulp/rtc_clock.sv
# Level 1
- rtl/pulp/safe_domain.sv
# Level 2
- rtl/pulp/pulp.sv
- target: simulation
files:
- rtl/tb/riscv_pkg.sv
- rtl/tb/jtag_pkg.sv
- rtl/tb/pulp_tap_pkg.sv
- rtl/tb/tb_clk_gen.sv
- rtl/tb/tb_fs_handler.sv
# - rtl/tb/dpi_models/dpi_models.sv
# - rtl/tb/tb_driver/tb_driver.sv
- rtl/tb/SimJTAG.sv
- rtl/tb/SimDTM.sv
- rtl/tb/tb_pulp.sv
# - target: not(synthesis)
# files:
# - rtl/vip/spi_flash/S25fs256s/model/s25fs256s.v
# defines: {"SPEEDSIM"}
# - target: not(synthesis)
# files:
# - rtl/vip/i2c_eeprom/24FC1025.v
# defines: {"SPEEDSIM"}
- target: not(synthesis)
files:
- rtl/vip/i2s/i2c_if.v
- rtl/vip/i2s/i2s_vip_channel.sv
- rtl/vip/i2s/i2s_vip.sv
defines: {"SPEEDSIM"}
- target: not(synthesis)
files:
- rtl/vip/spi_master_padframe.sv
- rtl/vip/uart_tb_rx.sv
- rtl/vip/camera/cam_vip.sv
|
<gh_stars>0
---
file_list:
- "*.vhd"
rule:
constant_004:
case: upper
generic_007:
case: upper
port_025:
disable: false
variable_007:
disable: true
variable_012:
disable: false
signal_007:
disable: true
signal_008:
disable: false
signal_015:
consecutive: 1
...
|
<reponame>JeffDeCola/my-systemverilog-examples<gh_stars>1-10
# my-systemverilog-examples task-readme-github-pages.yml
platform: linux
image_resource:
type: docker-image
source:
repository: golang
tag: 1.7.1
inputs:
- name: my-systemverilog-examples
outputs:
- name: my-systemverilog-examples-updated
run:
path: ./my-systemverilog-examples/ci/scripts/readme-github-pages.sh
# args: [-debug]
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: 'Person re-identification model (backbone: resnet18).'
input size: 160*80
float ops: 0.95G
task: person reid
framework: caffe
prune: 'no'
version: 2.0
files:
- name: cf_reid_market1501_160_80_0.95G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=cf_reid_market1501_160_80_0.95G_1.4.zip
checksum: a42de983533aba4a7ad39b9dcc40b8f9
- name: reid
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=reid-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: ce8c916e95d491cf8313b107075adec3
- name: reid
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=reid-vck190-r2.0.0.tar.gz
checksum: 0466d4ca3c78f5f045562b4bb8df6ad2
- name: reid
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=reid-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: 727bc214777ffd4bf05ddca6a921e319
- name: reid
type: xmodel
board: vck50008pe-DPUCVDX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=reid-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz
checksum: 384f22390225fbb31c16f57a0b4c947c
- name: reid
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=reid-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: 5ae233c5c15cc96f5b58d051300d6875
- name: reid
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=reid-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: de35a7e60a6ce1ae9eb95fc8af15cde9
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
<filename>.travis.yml<gh_stars>0
language: python
python:
- '2.7'
- '3.8'
install:
- pip install -r requirements.txt
- pip install --editable .
script: ./test.sh
after_success: coveralls
notifications:
slack:
rooms:
secure: <KEY>
|
<filename>rtl/axi_slice/src_files.yml
axi_slice:
files: [
src/axi_single_slice.sv,
src/axi_ar_buffer.sv,
src/axi_aw_buffer.sv,
src/axi_b_buffer.sv,
src/axi_r_buffer.sv,
src/axi_slice.sv,
src/axi_w_buffer.sv,
src/axi_slice_wrap.sv,
]
jg_slint_top_name: [
axi_slice
]
jg_slint_elab_opt: [
]
jg_slint_postelab_cmds: [
]
jg_slint_clocks: [
clk_i,
]
jg_slint_resets: [
~rst_ni,
]
|
<filename>ips/ip/i2c_slave_for_lpc8_polling/i2c_slave_for_lpc8_polling.yml
I2C_SLAVE_FOR_LPC8_POLLING:
IP: __I2C_SLAVE_FOR_LPC8_POLLING
CMD: ICS
ALT: a
scl_slv_polling:
PIN: A34
DIRECTION: IN
sda:
PIN: A36
DIRECTION: INOUT
I2C_SLAVE_FOR_LPC8_POLLING1:
IP: __I2C_SLAVE_FOR_LPC8_POLLING
CMD: IBS
ALT: b
scl_slv_polling:
PIN: A34
DIRECTION: IN
sda:
PIN: A36
DIRECTION: INOUT
|
<gh_stars>0
name: Emscripten Build
on: [push, pull_request]
jobs:
emcc:
runs-on: ubuntu-latest
steps:
- uses: mymindstorm/setup-emsdk@v11
- uses: actions/checkout@v2
- name: Cache sources
id: cache-sources
uses: actions/cache@v2
with:
path: .
key: cache-yosys
- name: Build
run: |
make config-emcc
make YOSYS_VER=latest
- uses: actions/upload-artifact@v2
with:
name: yosysjs
path: yosysjs-latest.zip
|
# This is a basic workflow to help you get started with Actions
name: CI
# Controls when the action will run. Triggers the workflow on push or pull request
# events but only for the main branch
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
jobs:
build:
# The type of runner that the job will run on
runs-on: ubuntu-latest
# Steps represent a sequence of tasks that will be executed as part of the job
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v2
- name: Cache Iverilog
id: cache-iverilog
uses: actions/cache@v2
with:
path: /usr/local/bin/iverilog
key: ${{ runner.os }}-iverilog
- name: Install iverilog
if: steps.cache-iverilog.outputs.cache-hit != 'true'
run: |
wget ftp://ftp.icarus.com/pub/eda/verilog/v11/verilog-11.0.tar.gz
sudo tar -zxvf verilog-11.0.tar.gz
cd verilog-11.0
sudo ./configure
sudo make
sudo make install
iverilog -V
echo $(which iverilog)
- name: Install verilator
run: |
echo Installing Verilator
sudo apt-get install verilator
- name: Configure
run: |
sudo mkdir build && cd build
sudo ../configure
sudo make
- name: Test lab1
run: |
echo Testing lab1
cd build
sudo make lab1-imul-check
sudo make lab1-imul-IntMulFL-test && ./lab1-imul-IntMulFL-test
sudo make lab1-imul-IntMulBase-test && ./lab1-imul-IntMulBase-test
          sudo make lab1-imul-IntMulAlt-test && ./lab1-imul-IntMulAlt-test
- name: Test All
run: |
echo Testing all subprojects
cd build
sudo make check
- name: Archive build artifact
uses: actions/upload-artifact@v2
with:
name: build
path: |
build
- name: Repository Dispatch
uses: peter-evans/repository-dispatch@v1
with:
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
event-type: test-pass
client-payload: '{"ref": "${{ github.ref }}", "sha": "${{ github.sha }}"}'
|
<reponame>RoaLogic/ahb3lite_apb_bridge<gh_stars>10-100
theme: jekyll-theme-dinky
title: AHB-Lite to APB Bridge
description: Parameterised Asynchronous AHB-Lite to APB Bridge
show_downloads: true
show_license: true
license: Non-Commercial License
|
<reponame>ShreyasR46/Chip_Security_Advanced_Project<gh_stars>0
common_cells_all:
files:
- src/fifo_v1.sv
- src/fifo_v2.sv
- src/fifo_v3.sv
- src/lfsr_8bit.sv
- src/spill_register.sv
- src/stream_register.sv
- src/stream_mux.sv
- src/stream_demux.sv
- src/cdc_2phase.sv
- src/onehot_to_bin.sv
- src/rstgen.sv
- src/rstgen_bypass.sv
- src/edge_propagator_tx.sv
- src/edge_propagator_rx.sv
- src/edge_propagator.sv
- src/lzc.sv
- src/rrarbiter.sv
- src/stream_arbiter.sv
- src/sync_wedge.sv
- src/sync.sv
- src/clk_div.sv
- src/edge_detect.sv
- src/serial_deglitch.sv
- src/counter.sv
- src/mv_filter.sv
- src/popcount.sv
# deprecated modules
- src/deprecated/find_first_one.sv
- src/deprecated/generic_fifo.sv
- src/deprecated/generic_fifo_adv.sv
- src/deprecated/generic_LFSR_8bit.sv
- src/deprecated/pulp_sync_wedge.sv
- src/deprecated/pulp_sync.sv
- src/deprecated/clock_divider.sv
- src/deprecated/clock_divider_counter.sv
|
<reponame>es-ude/elastic-ai.creator
name: Checks
on:
pull_request:
workflow_call:
jobs:
integration-tests:
name: Integration Tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.9
- uses: Gr1N/setup-poetry@v7
- uses: actions/cache@v2
with:
path: ~/.cache/pypoetry/virtualenvs
key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }}
- run: poetry --version
- run: python --version
- run: poetry install --no-interaction --extras brevitas
- run: poetry run python -m unittest discover elasticai/creator/integrationTests
unit-tests:
name: Unit Tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.9
- uses: Gr1N/setup-poetry@v7
- uses: actions/cache@v2
with:
path: ~/.cache/pypoetry/virtualenvs
key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }}
- run: poetry --version
- run: python --version
- run: poetry install --no-interaction
      # NOTE: each `run` step starts a fresh shell, so a standalone `cd` step has no effect;
      # run the test discovery directly from the repository root instead.
      - run: poetry run python -m unittest discover elasticai/creator/tests
lint-commit-messages:
name: Lint Commit Messages
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- uses: wagoid/commitlint-github-action@v2
|
<reponame>Charmve/BLE-Security-Att-Def
os: Visual Studio 2017
clone_depth: 1
configuration:
- Release
init:
- C:\"Program Files (x86)"\"Microsoft Visual Studio 14.0"\VC\vcvarsall.bat %PLATFORM%
install:
# Dependencies for libHackRF
- appveyor DownloadFile "https://github.com/libusb/libusb/releases/download/v1.0.22/libusb-1.0.22.7z" -FileName "C:\libusb.7z"
- 7z x -y "C:\libusb.7z" -o"C:\libusb"
- appveyor DownloadFile "http://mirrors.kernel.org/sourceware/pthreads-win32/pthreads-w32-2-9-1-release.zip" -FileName "C:\pthreads-w32-release.zip"
- 7z x -y "C:\pthreads-w32-release.zip" -o"C:\pthreads"
- appveyor DownloadFile "http://ftp.gnome.org/pub/gnome/binaries/win32/dependencies/pkg-config_0.26-1_win32.zip" -FileName "C:\pkg-config_win32.zip"
- 7z x -y "C:\pkg-config_win32.zip" -o"C:\pkg-config"
# FFTW for hackrf_sweep
- curl -fsS -o "C:\fftw-3.3.5.zip" "ftp://ftp.fftw.org/pub/fftw/fftw-3.3.5-dll64.zip"
- 7z x -y "C:\fftw-3.3.5.zip" -o"C:\fftw"
- cd c:\fftw
- ps: lib /machine:x64 /def:libfftw3f-3.def
# ARM GCC for firmware builds
# - appveyor DownloadFile "https://developer.arm.com/-/media/Files/downloads/gnu-rm/6-2017q2/gcc-arm-none-eabi-6-2017-q2-update-win32.zip" -FileName "C:\gcc-arm-none-eabi-win32.zip"
# - 7z x -y "C:\gcc-arm-none-eabi-win32.zip" -o"C:\gcc-arm-none-eabi"
# - set PATH=%PATH%;c:\gcc-arm-none-eabi\bin
build_script:
# Host library and tools
- mkdir c:\projects\hackrf\host\build
- cd c:\projects\hackrf\host\build
- cmake -G "Visual Studio 14 2015 Win64" \
-DLIBUSB_LIBRARIES="C:\libusb\MS64\dll\libusb-1.0.lib" \
-DLIBUSB_INCLUDE_DIR="C:\libusb\include\libusb-1.0" \
-DTHREADS_PTHREADS_INCLUDE_DIR=c:\pthreads\Pre-built.2\include \
-DTHREADS_PTHREADS_WIN32_LIBRARY=c:\pthreads\Pre-built.2\lib\x64\pthreadVC2.lib \
-DPKG_CONFIG_EXECUTABLE="C:\pkg-config\bin\pkg-config.exe" \
-DFFTW_INCLUDES=C:\fftw \
-DFFTW_LIBRARIES=C:\fftw\libfftw3f-3.lib \
..
- msbuild HackRF.sln /logger:"C:\Program Files\AppVeyor\BuildAgent\Appveyor.MSBuildLogger.dll"
# Firmware
# - cd c:\projects\hackrf\
# - git submodule init
# - git submodule update
# - '%CYG_BASH% -lc "cd $APPVEYOR_BUILD_FOLDER && firmware/appveyor.sh"'
after_build:
- 7z a %APPVEYOR_BUILD_FOLDER%\HackRF-Windows-%APPVEYOR_REPO_COMMIT%.zip %APPVEYOR_BUILD_FOLDER%\host\build\libhackrf\src\Release\* %APPVEYOR_BUILD_FOLDER%\host\build\hackrf-tools\src\Release\*
artifacts:
- path: HackRF-Windows-%APPVEYOR_REPO_COMMIT%.zip
name: HackRF-Windows-%APPVEYOR_REPO_COMMIT%
|
<filename>libs/EXTERNAL/capnproto/appveyor.yml
# Cap'n Proto AppVeyor configuration
#
# See https://www.appveyor.com/docs/appveyor-yml/ for configuration options.
#
# This script configures AppVeyor to:
# - Use CMake to ...
# build Cap'n Proto with VS2017.
# build Cap'n Proto samples with VS2017.
# build Cap'n Proto with MinGW.
# build Cap'n Proto with Cygwin.
version: "{build}"
branches:
only:
- master
- /release-.*/
# Don't build non-master branches (unless they open a pull request).
image: Visual Studio 2017
# AppVeyor build worker image (VM template).
shallow_clone: true
# Fetch repository as zip archive.
environment:
MINGW_DIR: C:\mingw-w64\x86_64-7.2.0-posix-seh-rt_v5-rev1\mingw64
BUILD_TYPE: debug
matrix:
# TODO(someday): Add MSVC x64 builds, MinGW x86 build?
- CMAKE_GENERATOR: Visual Studio 15 2017
BUILD_NAME: vs2017
EXTRA_BUILD_FLAGS: # /maxcpucount
# TODO(someday): Right now /maxcpucount occasionally expresses a filesystem-related race:
# capnp-capnpc++ complains that it can't create test.capnp.h.
- CMAKE_GENERATOR: MinGW Makefiles
BUILD_NAME: mingw
EXTRA_BUILD_FLAGS: -j2
- BUILD_NAME: cygwin
install:
- ps: Get-Command sh.exe -All | Remove-Item
# CMake refuses to generate MinGW Makefiles if sh.exe is in the PATH
before_build:
- set PATH=%MINGW_DIR%\bin;%PATH%
- set BUILD_DIR=build-%BUILD_NAME%
- set INSTALL_PREFIX=%CD%\capnproto-c++-%BUILD_NAME%
- cmake --version
build_script:
- echo "Building Cap'n Proto with %CMAKE_GENERATOR%"
- if NOT "%BUILD_NAME%"=="cygwin" cmake -Hc++ -B%BUILD_DIR% -G "%CMAKE_GENERATOR%" -DCMAKE_BUILD_TYPE=%BUILD_TYPE% -DCMAKE_INSTALL_PREFIX=%INSTALL_PREFIX%
- if NOT "%BUILD_NAME%"=="cygwin" cmake --build %BUILD_DIR% --config %BUILD_TYPE% --target install -- %EXTRA_BUILD_FLAGS%
# MinGW wants the build type at configure-time while MSVC wants the build type at build-time. We
# can satisfy both by passing the build type to both cmake invocations. We have to suffer a
# warning, but both generators will work.
- echo "Building Cap'n Proto samples with %CMAKE_GENERATOR%"
- if NOT "%BUILD_NAME%"=="cygwin" cmake -Hc++/samples -B%BUILD_DIR%-samples -G "%CMAKE_GENERATOR%" -DCMAKE_BUILD_TYPE=%BUILD_TYPE% -DCMAKE_PREFIX_PATH=%INSTALL_PREFIX%
- if NOT "%BUILD_NAME%"=="cygwin" cmake --build %BUILD_DIR%-samples --config %BUILD_TYPE%
# Cygwin build -- use super-test.sh like other Unix builds.
# But, we need to install Cygwin's cmake package in order to pass the cmake part of super-test.
# Somewhat ridiculously, this requires downloading Cygwin's setup program and running it.
- if "%BUILD_NAME%"=="cygwin" appveyor DownloadFile "https://cygwin.com/setup-x86_64.exe" -FileName "C:\cygwin64\setup-x86_64.exe"
- if "%BUILD_NAME%"=="cygwin" C:\cygwin64\setup-x86_64.exe --quiet-mode --no-shortcuts --upgrade-also --root "C:\cygwin64" --packages cmake
- if "%BUILD_NAME%"=="cygwin" C:\cygwin64\bin\bash -lc 'cd /cygdrive/c/projects/capnproto; ./super-test.sh -j2 quick'
test_script:
# Sleep a little to prevent interleaving test output with build output.
- if NOT "%BUILD_NAME%"=="cygwin" timeout /t 2
- if NOT "%BUILD_NAME%"=="cygwin" cd %BUILD_DIR%\src
- if NOT "%BUILD_NAME%"=="cygwin" ctest -V -C %BUILD_TYPE%
|
---
project:
description: "Time to Data Converter with Fine Delay"
foundry: "SkyWater"
git_url: "https://github.com/tgingold/OpenTDC.git"
organization: "N/A"
organization_url: "N/A"
owner: "<NAME>"
process: "SKY130"
project_name: "OpenTDC"
tags:
- "Open MPW"
category: "Sensor"
top_level_netlist: "verilog/gl/caravel.v"
user_level_netlist: "verilog/gl/user_project_wrapper.v"
version: "0.1"
cover_image: ""
|
# File auto-generated by Padrick 0.1.0.post0.dev49+g9979c54.dirty
# IPApprox dependencies for alsaqr_periph_padframe
common_cells:
commit: v1.21.0
domain: [cluster, soc]
server: https://github.com
group: pulp-platform
register_interface:
commit: v0.2.1
domain: [soc]
server: https://github.com
group: pulp-platform
axi/axi:
commit: v0.27.0
domain: [cluster, soc]
server: https://github.com
group: pulp-platform
|
sudo: required
language: python
python:
  - "3.6"
services:
- docker
script:
- docker pull ghdl/ext:vunit
- docker run -dt --name=vunit_docker ghdl/ext:vunit bash
- docker exec vunit_docker mkdir siaMiner
- docker exec vunit_docker bash -c 'curl -L https://github.com/pedrorivera/SiaFpgaMiner/archive/master.tar.gz | tar xz -C siaMiner'
- docker exec vunit_docker python3 ./siaMiner/SiaFpgaMiner-master/Tools/run_vunit.py
- docker stop vunit_docker
- docker rm vunit_docker
|
<gh_stars>10-100
# Copyright (C) 2019-2021 The SymbiFlow Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
name: ibex
description: Full ibex core test
top_module: ibex_simple_system
tags: ibex
path: third_party/cores/ibex
command: fusesoc --cores-root third_party/cores/ibex run --target=sim --setup lowrisc:ibex:ibex_simple_system --RV32E=0 --RV32M=ibex_pkg::RV32MFast
conf_file: build/lowrisc_ibex_ibex_simple_system_0/sim-verilator/lowrisc_ibex_ibex_simple_system_0.vc
test_file: ibex-sim.sv
timeout: 100
compatible-runners: verilator-uhdm verilator slang
type: parsing elaboration
|
# supported task types are 'classification', 'object_detection' and 'semantic_segmentation'.
task_type: {{ task_type }}
network_name: {{ network_name }}
dataset:
format: {{ dataset_format }}
train_path: {{ train_path }}
test_path: {{ test_path }}
trainer:
batch_size: {{ batch_size }}
epochs: {{ training_epochs }}
common:
image_size:
- {{ image_size[0] }} # height
- {{ image_size[1] }} # width
# set pretrain model name. currently, this feature is not supported, always ignored.
pretrain_model: false
# enable dataset prefetch; set false if weird problems happen
dataset_prefetch: true
|
<reponame>ess-dmsc/dmg-build-scripts
---
- hosts: packet-generator
gather_facts: False
tasks:
- name: start carbon metrics collection
command: "{{daemonize_cmd}} {{script_path}}/pktgen_fpga_config/pktgen_carbon.bash"
tags:
- generator
|
<reponame>francof2a/landsat_soil_classifier
KerasJson: /media/data/projects/landsat_soil_classifier/models/ANN50x50.json
KerasH5: /media/data/projects/landsat_soil_classifier/models/ANN50x50_weights.h5
InputData: /media/data/projects/landsat_soil_classifier/data/sat_x_test.dat
OutputPredictions: /media/data/projects/landsat_soil_classifier/data/sat_y_test.dat
OutputDir: /media/data/projects/landsat_soil_classifier/fpga/hls_minimal
ProjectName: fpga_minimal
XilinxPart: xazu7eg-fbvb900-1-i
ClockPeriod: 24
IOType: io_parallel # options: io_serial/io_parallel
HLSConfig:
Model:
Precision: ap_fixed<24,8>
ReuseFactor: 4
# Strategy: Latency
# Strategy: Resource
# LayerType:
# Dense:
# ReuseFactor: 2
# Strategy: Resource
# Compression: True
|
<gh_stars>0
parameters:
ibex_configs: []
steps:
- ${{ each config in parameters.ibex_configs }}:
# ibex_config.py will exit with error code 1 on any error which will cause
# the CI to fail if there's an issue with the configuration file or an
# incorrect configuration name being used
- bash: |
./util/ibex_config.py ${{ config }} fusesoc_opts
displayName: Display fusesoc config for ${{ config }}
- bash: |
      fusesoc --cores-root . run --target=lint lowrisc:ibex:ibex_core_tracing $(./util/ibex_config.py ${{ config }} fusesoc_opts)
if [ $? != 0 ]; then
echo -n "##vso[task.logissue type=error]"
echo "Verilog lint failed. Run 'fusesoc --cores-root . run --target=lint lowrisc:ibex:ibex_core_tracing' to check and fix all errors."
exit 1
fi
displayName: Lint Verilog source files with Verilator for ${{ config }}
- bash: |
# Build simulation model of Ibex
      fusesoc --cores-root=. run --target=sim --setup --build lowrisc:ibex:ibex_riscv_compliance $(./util/ibex_config.py ${{ config }} fusesoc_opts)
if [ $? != 0 ]; then
echo -n "##vso[task.logissue type=error]"
echo "Unable to build Verilator model of Ibex for compliance testing."
exit 1
fi
# Run compliance test suite
export TARGET_SIM=$PWD/build/lowrisc_ibex_ibex_riscv_compliance_0.1/sim-verilator/Vibex_riscv_compliance
export RISCV_PREFIX=riscv32-unknown-elf-
export RISCV_TARGET=ibex
export RISCV_DEVICE=rv32imc
fail=0
for isa in rv32i rv32im rv32imc rv32Zicsr rv32Zifencei; do
make -C build/riscv-compliance RISCV_ISA=$isa 2>&1 | tee run.log
if [ ${PIPESTATUS[0]} != 0 ]; then
echo -n "##vso[task.logissue type=error]"
echo "The RISC-V compliance test suite failed for $isa"
# There's no easy way to get the test results in machine-readable
# form to properly exclude known-failing tests. Going with an
# approximate solution for now.
if [ $isa == rv32i ] && grep -q 'FAIL: 4/48' run.log; then
echo -n "##vso[task.logissue type=error]"
            echo "Expected failure for rv32i, see lowrisc/ibex#100 for more information."
else
fail=1
fi
fi
done
exit $fail
displayName: Run RISC-V Compliance test for Ibex RV32IMC for ${{ config }}
|
<filename>dlk/examples/classification/lmnet_quantize_cifar10_max_pooling/config_dlk.yaml
BATCH_SIZE: 1
DATASET:
AUGMENTOR:
lmnet.data_processor.Sequence:
processors:
-
lmnet.data_augmentor.Pad:
bottom: 2
fill: 0
left: 2
right: 2
top: 2
-
lmnet.data_augmentor.Crop:
height: 32
is_resize: false
width: 32
-
lmnet.data_augmentor.FlipLeftRight:
is_bounding_box: false
probability: 0.5
BATCH_SIZE: 100
PRE_PROCESSOR:
lmnet.data_processor.Sequence:
processors:
-
lmnet.pre_processor.Resize:
size:
- 32
- 32
-
lmnet.pre_processor.DivideBy255: {}
DATASET_CLASS: lmnet.datasets.cifar10.Cifar10
IMAGE_SIZE:
- 32
- 32
IS_DEBUG: false
IS_PRETRAIN: false
MAX_STEPS: 70000
NETWORK:
ACTIVATION_QUANTIZER: lmnet.quantizations.linear.linear_mid_tread_half_quantizer
ACTIVATION_QUANTIZER_KWARGS:
bit: 2
max_value: 2
BATCH_SIZE: 100
IMAGE_SIZE:
- 32
- 32
LEARNING_RATE_FUNC: tensorflow.python.training.learning_rate_decay.piecewise_constant
LEARNING_RATE_KWARGS:
boundaries:
- 25000
- 50000
- 75000
values:
- 0.01
- 0.001
- 0.0001
- 1.0e-05
OPTIMIZER_CLASS: tensorflow.python.training.momentum.MomentumOptimizer
OPTIMIZER_KWARGS:
momentum: 0.9
WEIGHT_DECAY_RATE: 0.0005
WEIGHT_QUANTIZER: lmnet.quantizations.binary.binary_mean_scaling_quantizer
WEIGHT_QUANTIZER_KWARGS: {}
NETWORK_CLASS: lmnet.networks.classification.lmnet_quantize.LmnetQuantize
PRETRAIN_DIR: ''
PRETRAIN_FILE: ''
PRETRAIN_VARS: []
SAVE_STEPS: 5000
SUMMARISE_STEPS: 100
TEST_STEPS: 1000
|
variables:
SYNOPSYS_DC: synopsys-2019.12 dc_shell -64bit
before_script:
- export PATH=~/.cargo/bin:$PATH
- mkdir -p build
vsim:
stage: build
script:
- export ARTIFACT="vsim-$VSIM_VER"
- >
case $VSIM_VER in 20*)
export VSIM="questa-$VSIM_VER vsim -64";
export VLIB="questa-$VSIM_VER vlib";
export VLOG="questa-$VSIM_VER vlog -64";
;;
*)
export VSIM="vsim-$VSIM_VER -64";
export VLIB="vlib-$VSIM_VER";
export VLOG="vlog-$VSIM_VER -64";
;;
esac
- >
if ! $CI_PROJECT_DIR/.gitlab-ci.d/memora_retry.sh lookup $ARTIFACT; then
cd build && ../scripts/compile_vsim.sh && mv work{,-$VSIM_VER}
$CI_PROJECT_DIR/.gitlab-ci.d/memora_retry.sh insert $ARTIFACT
fi
parallel:
matrix:
- VSIM_VER: ['10.7b', '10.7e', '2020.1', '2021.1']
synopsys_dc:
stage: build
script:
- >
if ! $CI_PROJECT_DIR/.gitlab-ci.d/memora_retry.sh lookup synopsys_dc; then
cd build && ../scripts/synth.sh
$CI_PROJECT_DIR/.gitlab-ci.d/memora_retry.sh insert synopsys_dc
fi
.run_vsim: &run_vsim
stage: test
script:
- export ARTIFACT="$TEST_MODULE-vsim_$VSIM_VER"
- >
case $VSIM_VER in 20*)
export VSIM="questa-$VSIM_VER vsim -64";
;;
*)
export VSIM="vsim-$VSIM_VER -64";
;;
esac
- >
if ! $CI_PROJECT_DIR/.gitlab-ci.d/memora_retry.sh lookup $ARTIFACT; then
$CI_PROJECT_DIR/.gitlab-ci.d/memora_retry.sh get vsim-$VSIM_VER
cd build
mv work{-$VSIM_VER,}
../scripts/run_vsim.sh --random-seed $TEST_MODULE && touch $ARTIFACT.tested
$CI_PROJECT_DIR/.gitlab-ci.d/memora_retry.sh insert $ARTIFACT
fi
parallel:
matrix:
- VSIM_VER: ['10.7b', '10.7e', '2020.1', '2021.1']
axi_addr_test:
<<: *run_vsim
variables:
TEST_MODULE: axi_addr_test
axi_atop_filter:
<<: *run_vsim
variables:
TEST_MODULE: axi_atop_filter
axi_cdc:
<<: *run_vsim
variables:
TEST_MODULE: axi_cdc
axi_delayer:
<<: *run_vsim
variables:
TEST_MODULE: axi_delayer
axi_dw_downsizer:
<<: *run_vsim
variables:
TEST_MODULE: axi_dw_downsizer
axi_dw_upsizer:
<<: *run_vsim
variables:
TEST_MODULE: axi_dw_upsizer
axi_isolate:
<<: *run_vsim
variables:
TEST_MODULE: axi_isolate
axi_lite_regs:
<<: *run_vsim
variables:
TEST_MODULE: axi_lite_regs
axi_lite_to_apb:
<<: *run_vsim
variables:
TEST_MODULE: axi_lite_to_apb
axi_lite_to_axi:
<<: *run_vsim
variables:
TEST_MODULE: axi_lite_to_axi
axi_lite_mailbox:
<<: *run_vsim
variables:
TEST_MODULE: axi_lite_mailbox
axi_lite_xbar:
<<: *run_vsim
variables:
TEST_MODULE: axi_lite_xbar
axi_modify_address:
<<: *run_vsim
variables:
TEST_MODULE: axi_modify_address
axi_serializer:
<<: *run_vsim
variables:
TEST_MODULE: axi_serializer
axi_sim_mem:
<<: *run_vsim
variables:
TEST_MODULE: axi_sim_mem
axi_to_axi_lite:
<<: *run_vsim
variables:
TEST_MODULE: axi_to_axi_lite
axi_xbar:
<<: *run_vsim
variables:
TEST_MODULE: axi_xbar
|
<reponame>GregAC/opentitan<gh_stars>1000+
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
# Pipeline variables, used by the public and private CI pipelines
# Quote values to ensure they are parsed as string (version numbers might
# end up as float otherwise).
variables:
VERILATOR_VERSION: "4.104"
RISCV_TOOLCHAIN_TAR_VERSION: "20210412-1"
RISCV_TOOLCHAIN_TAR_VARIANT: "lowrisc-toolchain-gcc-rv32imcb"
RISCV_COMPLIANCE_GIT_VERSION: "844c6660ef3f0d9b96957991109dfd80cc4938e2"
VERIBLE_VERSION: "v0.0-1213-g9e5c085"
# lowRISC-internal version numbers of Ibex-specific Spike builds.
SPIKE_IBEX_VERSION: "20201023-git-255bf1cacc599b1413438c269100f3ecd0eb3352"
|
---
input_file : ../akane/12_merge_sort_core_3.akd
output_file : 12_merge_sort_core_3.md
image_url :
"Fig.1 マージソートコアの構成" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/e34555ae-cc28-e15e-a2e1-40ad0fc83cdc.jpeg"
"Fig.2 4-way マージソートツリーによる16ワードデータのソート例" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/3434ce32-45ec-3aaf-5a04-e097b0da7851.jpeg"
"Fig.3 ストリームフィードバックの動作例(1st PASS-1)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/a4984948-b818-9876-f81e-815eb8a57a7d.jpeg"
"Fig.4 ストリームフィードバックの動作例(1st PASS-2)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/140468c1-cbfe-07bb-a237-af22da1df92d.jpeg"
"Fig.5 ストリームフィードバックの動作例(1st PASS-3)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/17a68f54-dfa0-3cf5-8717-debba4d5e886.jpeg"
"Fig.6 ストリームフィードバックの動作例(1st PASS-4)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/9c66633f-7293-09c4-833c-8f853ed5bb52.jpeg"
"Fig.7 ストリームフィードバックの動作例(2nd PASS)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/7c97d656-c1dd-d1a3-ced1-5b1b410ce3c0.jpeg"
link_list :
- id : "「はじめに」"
title: "「VHDL で書くマージソーター(はじめに)」@Qiita"
url : "https://qiita.com/ikwzm/items/6665b2ef44d878a5b85f"
- id : "「ワードの定義」"
title: "「VHDL で書くマージソーター(ワードの定義)」@Qiita"
url : "https://qiita.com/ikwzm/items/bdcd8876317b908ff492"
- id : "「ワード比較器」"
title: "「VHDL で書くマージソーター(ワード比較器)」@Qiita"
url : "https://qiita.com/ikwzm/items/d5d1dd264b1670f33bd7"
- id : "「ソーティングネットワーク」"
title: "「VHDL で書くマージソーター(ソーティングネットワーク)」@Qiita"
url : "https://qiita.com/ikwzm/items/a1d06e47523759c726ae"
- id : "「バイトニックマージソート」"
title: "「VHDL で書くマージソーター(バイトニックマージソート)」@Qiita"
url : "https://qiita.com/ikwzm/items/366eacbf6a877994c955"
- id : "「バッチャー奇偶マージソート」"
title: "「VHDL で書くマージソーター(バッチャー奇偶マージソート)」@Qiita"
url : "https://qiita.com/ikwzm/items/c21a53f21b87408a7805"
- id : "「シングルワード マージソート ノード」"
title: "「VHDL で書くマージソーター(シングルワード マージソート ノード)」@Qiita"
url : "https://qiita.com/ikwzm/items/7fd7ef9ffc4d9b314fee"
- id : "「マルチワード マージソート ノード」"
title: "「VHDL で書くマージソーター(マルチワード マージソート ノード)」@Qiita"
url : "https://qiita.com/ikwzm/items/ed96b7a44b83bcee4ba5"
- id : "「マージソート ツリー」"
title: "「VHDL で書くマージソーター(マージソート ツリー)」@Qiita"
url : "https://qiita.com/ikwzm/items/1f76ae5cda95aaf92501"
- id : "「端数ワード処理」"
title: "「VHDL で書くマージソーター(端数ワード処理)」@Qiita"
url : "https://qiita.com/ikwzm/items/6b15340f1e05ef03f8d0"
- id : "「ストリーム入力」"
title: "「VHDL で書くマージソーター(ストリーム入力)」@Qiita"
url : "https://qiita.com/ikwzm/items/56e22511021a082a2ccd"
- id : "「ストリームフィードバック」"
title: "「VHDL で書くマージソーター(ストリームフィードバック)」@Qiita"
url : "https://qiita.com/ikwzm/items/e8c59c0ec92956c9355f"
- id : "「ArgSort IP」"
title: "「VHDL で書くマージソーター(ArgSort IP)」@Qiita"
url : "https://qiita.com/ikwzm/items/89fc9542492fca74c9e3"
- id : "「ArgSort-Ultra96」"
title: "「VHDL で書くマージソーター(ArgSort-Ultra96)」@Qiita"
url : "https://qiita.com/ikwzm/items/d58c9b77d038e23ac792"
- id : "「ArgSort-Kv260」"
title: "「VHDL で書くマージソーター(ArgSort-Kv260)」@Qiita"
url : "https://qiita.com/ikwzm/items/ec0f779534c44b35334a"
- id : "ACRi"
title: "アダプティブコンピューティング研究推進体(ACRi)"
url : "https://www.acri.c.titech.ac.jp/wp"
- id : "アダプティブコンピューティング研究推進体(ACRi)"
title: "アダプティブコンピューティング研究推進体(ACRi)"
url : "https://www.acri.c.titech.ac.jp/wp"
- id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(1)」"
title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(1)」"
url : "https://www.acri.c.titech.ac.jp/wordpress/archives/132"
- id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(2)」"
title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(2)」"
url : "https://www.acri.c.titech.ac.jp/wordpress/archives/501"
- id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(3)」"
title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(3)」"
url : "https://www.acri.c.titech.ac.jp/wordpress/archives/2393"
- id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(4)」"
title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(4)」"
url : "https://www.acri.c.titech.ac.jp/wordpress/archives/3888"
- id : "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(5)」"
title: "「FPGAを使って基本的なアルゴリズムのソーティングを劇的に高速化(5)」"
url : "https://www.acri.c.titech.ac.jp/wordpress/archives/4713"
seg_level : -1
---
|
KerasJson: /media/data/projects/landsat_soil_classifier/models/ANN50x50th.json
KerasH5: /media/data/projects/landsat_soil_classifier/models/ANN50x50th_weights.h5
InputData: /media/data/projects/landsat_soil_classifier/data/sat_x_test.dat
OutputPredictions: /media/data/projects/landsat_soil_classifier/data/sat_y_test.dat
OutputDir: /media/data/projects/landsat_soil_classifier/fpga/hls_ANN50x50th
ProjectName: fpga_ANN50x50th
XilinxPart: xazu7eg-fbvb900-1-i
ClockPeriod: 24.0
IOType: io_parallel
HLSConfig:
Model:
Precision: ap_fixed<24,8>
ReuseFactor: 4
|
<gh_stars>0
fpnew:
incdirs: [
../common_cells/include,
]
files: [
src/fpnew_pkg.sv,
src/fpnew_cast_multi.sv,
src/fpnew_classifier.sv,
src/fpnew_divsqrt_multi.sv,
src/fpnew_fma.sv,
src/fpnew_fma_multi.sv,
src/fpnew_noncomp.sv,
src/fpnew_opgroup_block.sv,
src/fpnew_opgroup_fmt_slice.sv,
src/fpnew_opgroup_multifmt_slice.sv,
src/fpnew_pipe_in.sv,
src/fpnew_pipe_out.sv,
src/fpnew_pipe_inside_fma.sv,
src/fpnew_pipe_inside_cast.sv,
src/fpnew_rounding.sv,
src/fpnew_top.sv,
]
|
env:
runner:
type:
"runTestTestRunner"
common_compile_option: &common_compile >-
-sverilog
-ntb_opts uvm-1.2
common_sim_option: &common_sim >-
+UVM_VERBOSITY=UVM_LOW
+UVM_CONFIG_DB_TRACE
builds:
build2:
compile_option:
- *common_compile
- -timescale=1ns/10ps
pre_sim_action:
- echo "pre_sim_build2"
sim_option:
- *common_sim
post_sim_action:
- echo "post_sim_build2"
options:
vh:
on_action:
sim_option:
- +UVM_VERBOSITY=UVM_FULL
with_value_action:
sim_option:
- +UVM_VERBOSITY=$vh
groups:
group3:
build: build2
args:
- -vh
- -repeat 20
tests:
- test1:
groups:
- group2
- group1
|
<reponame>ess-dmsc/dmg-build-scripts
- name: packages are installed
become: yes
package:
name:
- daemonize
- psmisc
state: present
- name: destination folder exists
file:
path: ~/deployment
state: directory
- name: old binaries are deleted
file:
path: ~/deployment/event-formation-unit
state: absent
- name: packages are installed
unarchive:
remote_src: yes
src: https://jenkins.esss.dk/dm/job/ess-dmsc/job/event-formation-unit/job/master/lastSuccessfulBuild/artifact/event-formation-unit-centos7.tar.gz
dest: "~/deployment"
- name: git repository is available
git:
repo: 'https://github.com/ess-dmsc/integration-test.git'
dest: "~/integration-test"
|
name: vitis-ai-caffe
channels:
- conda-forge
- anaconda
dependencies:
- python=3.6
- caffe_decent
- vaic
- vart
- rt-engine
|
<reponame>youssefkandil/Dynamic_Power_Clock_Gating-<filename>environment.yml
name: Dynamic_power_clk_gate
channels:
- litex-hub
- conda-forge
dependencies:
- openroad
- iverilog
- yosys
- python
- pip
- pytest
- pip:
- pandas
|
on: [push, pull_request]
name: CI
jobs:
check:
name: Check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
name: "source checkout"
- uses: actions-rs/toolchain@v1
name: "toolchain installation"
with:
profile: minimal
toolchain: stable
override: true
- uses: actions-rs/cargo@v1
name: "cargo check"
with:
command: check
lints:
name: Lints
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
name: "source checkout"
- uses: actions-rs/toolchain@v1
name: "toolchain installation"
with:
profile: minimal
toolchain: stable
override: true
components: rustfmt, clippy
- uses: actions-rs/cargo@v1
name: "cargo fmt"
with:
command: fmt
args: --all -- --check
- uses: actions-rs/cargo@v1
name: "cargo clippy"
        continue-on-error: true # TODO: Remove once all outstanding clippy lints are fixed
with:
command: clippy
test:
name: Tests
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
name: "source checkout"
with:
submodules: recursive
- uses: actions-rs/toolchain@v1
name: "toolchain installation"
with:
profile: minimal
toolchain: stable
override: true
- uses: actions-rs/cargo@v1
name: "cargo test"
with:
command: test
args: --all
- run: python3 scripts/test.py --debug --verbose
name: "regression tests"
|
<filename>models/AI-Model-Zoo/model-list/tf_yolov3_voc_416_416_65.63G_1.4/model.yaml
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: YOLOv3 on VOC.
input size: 416*416
float ops: 65.63G
task: detection
framework: tensorflow
prune: 'no'
version: 1.4
files:
- name: tf_yolov3_voc_416_416_65.63G_1.4
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=tf_yolov3_voc_416_416_65.63G_1.4.zip
checksum: 69e94431a3846868f550ac1f6884f076
- name: yolov3_voc_tf
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_voc_tf-zcu102_zcu104_kv260-r1.4.0.tar.gz
checksum: 8ece8195c006a5ab1c81e1da1ed25edd
- name: yolov3_voc_tf
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_voc_tf-vck190-r1.4.0.tar.gz
checksum: 5f7d13975202ba363e55df7e3fcf6e07
- name: yolov3_voc_tf
type: xmodel
board: vck5000
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_voc_tf-vck5000-DPUCVDX8H-r1.4.0.tar.gz
checksum: 1892636c4cdd4cf4b6e85af93734054e
- name: yolov3_voc_tf
type: xmodel
board: u50-DPUCAHX8H & u50lv-DPUCAHX8H & u280-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_voc_tf-u50-u50lv-u280-DPUCAHX8H-r1.4.0.tar.gz
checksum: 172e9779db87b6c1872f4d7f1d02e5df
- name: yolov3_voc_tf
type: xmodel
board: u200-DPUCADF8H & u250-DPUCADF8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_voc_tf-u200-u250-r1.4.0.tar.gz
checksum: 39fe239726a583e2f24616f3f89c902d
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
common_compile_option: &common_compile >-
-sverilog
-ntb_opts uvm-1.1
-timescale=1ns/10ps
-f $JVSUK_HOME/etc/jvs.f
builds:
jvs_memory:
test_discoverer:
type: "uvm_test"
attr:
test_dir: $JVSUK_HOME/testcases/jvs_memory
compile_option:
- *common_compile
- $JVSUK_HOME/tb/jvs_memory_tb.sv -top jvs_memory_tb
sim_option:
- +UVM_VERBOSITY=UVM_HIGH
jvs_irq:
test_discoverer:
type: "uvm_test"
attr:
test_dir: $JVSUK_HOME/testcases/jvs_irq
compile_option:
- *common_compile
- $JVSUK_HOME/tb/jvs_irq_tb.sv -top jvs_irq_tb
sim_option:
- +UVM_VERBOSITY=UVM_HIGH
jvs_register_region:
test_discoverer:
type: "uvm_test"
attr:
test_dir: $JVSUK_HOME/testcases/jvs_register_region
compile_option:
- *common_compile
- $JVSUK_HOME/tb/jvs_register_region_tb.sv -top jvs_register_region_tb
sim_option:
- +UVM_VERBOSITY=UVM_HIGH
jvs_clk_rst_group:
test_discoverer:
type: "uvm_test"
attr:
test_dir: $JVSUK_HOME/testcases/jvs_clk_rst_group
compile_option:
- *common_compile
- $JVSUK_HOME/tb/jvs_clk_rst_group_tb.sv -top jvs_clk_rst_group_tb
sim_option:
- +UVM_VERBOSITY=UVM_HIGH
groups:
jvs_memory:
build:
jvs_memory
tests:
- jvs_memory_showcase:
jvs_irq:
build:
jvs_irq
tests:
- jvs_int_simple_test:
- jvs_msi_irq_test:
- jvs_soft_irq_test:
jvs_register_region:
build:
jvs_register_region
tests:
- jvs_register_region_test:
jvs_clk_rst_group:
build:
jvs_clk_rst_group
tests:
- jvs_clk_rst_group_basic_test:
jvs:
groups:
- jvs_memory
- jvs_irq
- jvs_register_region
- jvs_clk_rst_group
|
#
# List of IPs and relative branch/commit-hash/tag.
# Uses the YAML syntax.
#
# Examples:
#
# or10n:
# commit: tags/PULP3_final
# domain: [cluster]
# udma:
# commit: 62b10440
# domain: [soc]
# axi_slice:
# commit: tags/pulpissimo-v1.0
# domain: [soc,cluster]
# If a *tag* or *commit* is referenced, the IP will be in a
# state of DETACHED HEAD. Before committing any additional
# work, make sure to checkout a branch.
#
L2_tcdm_hybrid_interco:
commit: pulpissimo-v1.0
adv_dbg_if:
commit: v0.0.1
apb/apb2per:
commit: v0.0.1
apb/apb_adv_timer:
commit: v1.0.2
apb/apb_fll_if:
commit: pulpissimo-v1.0
apb/apb_gpio:
commit: v0.2.0
apb/apb_node:
commit: v0.1.1
apb_interrupt_cntrl:
commit: v0.0.1
axi/axi:
commit: v0.7.1
axi/axi_node:
commit: v1.1.4
axi/axi_slice:
commit: v1.1.4
axi/axi_slice_dc:
commit: v1.1.3
axi/axi_mem_if:
commit: v0.2.0
timer_unit:
commit: v1.0.2
common_cells:
commit: v1.13.1
fpnew:
commit: v0.6.1
jtag_pulp:
commit: v0.1
riscv:
commit: pulpissimo-v3.4.0
ibex:
commit: 13313952cd50ff04489f6cf3dba9ba05c2011a8b
group: lowRISC
scm:
commit: v1.0.1
generic_FLL:
commit: v0.1
tech_cells_generic:
commit: v0.1.6
udma/udma_core:
commit: v1.0.0
udma/udma_uart:
commit: v1.0.0
udma/udma_i2c:
commit: vega_v1.0.0
udma/udma_i2s:
commit: v1.0.0
udma/udma_qspi:
commit: v1.0.0
udma/udma_sdio:
commit: vega_v1.0.5
udma/udma_camera:
commit: v1.0.0
udma/udma_filter:
commit: v1.0.0
udma/udma_external_per:
commit: v1.0.0
hwpe-mac-engine:
commit: v1.2
riscv-dbg:
commit: v0.2
|
<gh_stars>0
# @package _global_
do_blink:
backend: vivado
figure: fig2
sub_figure: b
device: 6eg
part: xczu6eg-ffvb1156-2-e
num_luts: 8640
bft: bft8
  use_abs: true
|
<filename>.travis.yml
#=========================================================================
# .travis.yml
#=========================================================================
# Continuous integration with TravisCI
language: python
python:
- "3.6"
#-------------------------------------------------------------------------
# Install
#-------------------------------------------------------------------------
install:
- sudo apt-get install -y graphviz
# For some reason, the latest setuptools (46.1.1) causes permission
# denied errors when trying to access site packages (to access scripts
# like mflowgen-status), so we are pinning to 46.0.0 and waiting to see
# if they will fix it on their end.
#
# Also need to pin pip (20.0.2) because (20.1) makes pyupgrade exit 1
# for whatever reason. We are pinning for now and waiting to see if they
# fix it on their end.
- pip install --upgrade pip==20.0.2 setuptools==46.0.0 twine
- pip install --requirement requirements/ci.txt
- pip install .
- pip list
#-------------------------------------------------------------------------
# Tests
#-------------------------------------------------------------------------
script:
# Formatting checks
- autoflake --recursive --in-place --remove-duplicate-keys .
- pyupgrade --py3-only --keep-percent-format $(find . -name '*.py')
- flake8 --select=F --ignore=F401,F405,F403,F811,F821,F841
- git diff --exit-code
# New format check: every 'glob' in a TCL script must include
# 'lsort' to avoid non-determinism.
#
# Explanation:
# glob by itself returns a list in random order. For determinate order,
# precede the glob with an lsort, as recommended by TCL 'glob' man page.
#
# Example: given a directory with files 'cells-lvt' and 'cells',
# [glob cells*] => { cells cells-lvt } ? OR SOMETIMES ? { cells-lvt cells }
# [lsort [glob cells*]] => { cells cells-lvt } ALWAYS
#
# So what could go wrong? E.g. a "multivt" view might have
# two libraries "stdcells-tt.lib" and "stdcells-bc.lib" such that
# the same cell name appears in both libraries but with different
# characteristics. If the designer loads the libraries using e.g.
#
# set lib_list [glob stdcells*.lib]
#
# they might sometimes get the typical-case cell and other times get
# the worst-case cell, leading to erratic and unexpected behavior.
- echo Every '[glob' command must be preceded by '[lsort' or this test will fail.
- |
exit_status=0
for f in `find * -name \*.tcl`; do
ERR=
cat -n $f | grep '\[glob ' | grep -v '\[lsort ' > /tmp/tmp$$ && ERR=true
if [ "$ERR" ]; then
echo BAD FILE $f; cat /tmp/tmp$$; exit_status=13; echo ""
/bin/rm /tmp/tmp$$
fi
done
exit $exit_status
# Test that we can configure and run the common targets
- which mflowgen-python
- mflowgen run --demo
- cd mflowgen-demo && mkdir -p build && cd build
- mflowgen run --design ../GcdUnit
- make list
- make status
- make runtimes
- make graph
- make clean-all
- make info
# - py.test ../mflowgen/tests
#-------------------------------------------------------------------------
# After success
#-------------------------------------------------------------------------
after_success:
- echo done
|
<reponame>ikwzm/merge_sorter
---
input_file : 13_argsort.akd
output_file : ../13_argsort.md
image_url :
"Fig.1 マージソートコアの構成" : "image/13_argsort_1.jpg"
"Fig.2 ArgSort IPの構成" : "image/13_argsort_2.jpg"
"Fig.3 ArgSort IPの動作" : "image/13_argsort_3.jpg"
"Fig.4 ArgSort IP のプレビュー" : "image/13_argsort_4.jpg"
"Fig.5 ArgSort IP CORE Parameters" : "image/13_argsort_5.jpg"
"Fig.6 ArgSort IP STM-AXI Parameters" : "image/13_argsort_6.jpg"
"Fig.7 ArgSort IP MRG-AXI Parameters" : "image/13_argsort_7.jpg"
"Fig.8 ArgSort IP CSR-AXI Parameters" : "image/13_argsort_8.jpg"
link_list :
- id : "「はじめに」"
title: "「VHDL で書くマージソーター(はじめに)」"
url : "./01_introduction.md"
- id : "「ワードの定義」"
title: "「VHDL で書くマージソーター(ワードの定義)」"
url : "./02_word_package.md"
- id : "「ワード比較器」"
title: "「VHDL で書くマージソーター(ワード比較器)」"
url : "./03_word_compare.md"
- id : "「ソーティングネットワーク」"
title: "「VHDL で書くマージソーター(ソーティングネットワーク)」"
url : "./04_sorting_network.md"
- id : "「バイトニックマージソート」"
title: "「VHDL で書くマージソーター(バイトニックマージソート)」"
url : "./05_bitonic_sorter.md"
- id : "「バッチャー奇偶マージソート」"
title: "「VHDL で書くマージソーター(バッチャー奇偶マージソート)」"
url : "./06_oddeven_sorter.md"
- id : "「シングルワード マージソート ノード」"
title: "「VHDL で書くマージソーター(シングルワード マージソート ノード)」"
url : "./07_merge_sort_node_single.md"
- id : "「マルチワード マージソート ノード」"
title: "「VHDL で書くマージソーター(マルチワード マージソート ノード)」"
url : "./08_merge_sort_node_multi.md"
- id : "「マージソート ツリー」"
title: "「VHDL で書くマージソーター(マージソート ツリー)」"
url : "./09_merge_sort_tree.md"
- id : "「端数ワード処理」"
title: "「VHDL で書くマージソーター(端数ワード処理)」"
url : "./10_merge_sort_core_1.md"
- id : "「ストリーム入力」"
title: "「VHDL で書くマージソーター(ストリーム入力)」"
url : "./11_merge_sort_core_2.md"
- id : "「ストリームフィードバック」"
title: "「VHDL で書くマージソーター(ストリームフィードバック)」"
url : "./12_merge_sort_core_3.md"
- id : "「ArgSort IP」"
title: "「VHDL で書くマージソーター(ArgSort IP)」"
url : "./13_argsort.md"
- id : "「ArgSort-Ultra96」"
title: "「VHDL で書くマージソーター(ArgSort-Ultra96)」"
url : "https://github.com/ikwzm/ArgSort-Ultra96/blob/1.2.1/doc/ja/argsort-ultra96.md"
- id : "「ArgSort-Kv260」"
title: "「VHDL で書くマージソーター(ArgSort-Kv260)」"
url : "https://github.com/ikwzm/ArgSort-Kv260/blob/1.2.1/doc/ja/argsort-Kv260.md"
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: Yolox-m on TT100K
task: Traffic Sign Detection
framework: pytorch
prune: 'no'
version: 2.0
input size: 3*640*640
float ops: 73G
files:
- name: pt_yolox_TT100K_640_640_73G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_yolox_TT100K_640_640_73G_2.0.zip
checksum: a00e64f7a256ff05fe81a6b2e74b8a32
- name: tsd_yolox_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=tsd_yolox_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: 0dbb61f852850d3f7d0d14e26bdb7b0d
- name: tsd_yolox_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=tsd_yolox_pt-vck190-r2.0.0.tar.gz
checksum: 3aaeeb872ae09c51697e94727e90d3e1
- name: tsd_yolox_pt
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=tsd_yolox_pt-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: a2783d60cc96aef974d42b091ce1414e
- name: tsd_yolox_pt
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=tsd_yolox_pt-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: 5bf13b5daa587839919aea30b2819c63
- name: tsd_yolox_pt
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=tsd_yolox_pt-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: 791311aa334bdcf62e8eef7c7d588ed5
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
<reponame>ben-marshall/uc64
os : linux
language: c
dist : bionic
env :
global:
- TOOLS_DIR: ~/tools
- VERILATOR_ROOT: ~/tools/verilator
- YOSYS_ROOT: ~/tools/yosys
- RISCV: ~/tools/riscv64-unknown-elf
before_install :
- sudo apt-get --yes install flex bison libfl2 libfl-dev perl build-essential clang bison flex libreadline-dev gawk tcl-dev libffi-dev graphviz xdot pkg-config python3 libboost-system-dev libboost-python-dev libboost-filesystem-dev zlib1g-dev
jobs:
include:
- name: "Confidence Checks - Core"
script:
- source ./bin/install-verilator.sh
- source ./bin/install-toolchain.sh
- source ./bin/conf.sh
- make -B build-core_top
- make run-unit-core-example
- make run-unit-core-branch-tgt-trap
- make run-unit-core-imem-access-trap
- make run-unit-core-mul
- make run-unit-core-div
- name: "Confidence Checks - CCX"
script:
- source ./bin/install-verilator.sh
- source ./bin/install-toolchain.sh
- source ./bin/conf.sh
- make -B build-ccx_top
- make -B build-unit-tests-ccx
- make -B run-unit-tests-ccx
- name: "Arch Tests - I"
script:
- source ./bin/install-verilator.sh
- source ./bin/install-toolchain.sh
- source ./bin/conf.sh
- make -B build-ccx_top
- make arch-test-verify-croyde-I
- name: "Arch Tests - M"
script:
- source ./bin/install-verilator.sh
- source ./bin/install-toolchain.sh
- source ./bin/conf.sh
- make -B build-ccx_top
- make arch-test-verify-croyde-M
- name: "Arch Tests - C"
script:
- source ./bin/install-verilator.sh
- source ./bin/install-toolchain.sh
- source ./bin/conf.sh
- make -B build-ccx_top
- make arch-test-verify-croyde-C
- name: "Designer Assertions"
script:
- source ./bin/install-yosys.sh
- source ./bin/install-symbiyosys.sh
- source ./bin/install-boolector.sh
- source ./bin/conf.sh
- make -j $(nproc) da-prove-all
- name: "riscv-formal"
script:
- source ./bin/install-yosys.sh
- source ./bin/install-symbiyosys.sh
- source ./bin/install-boolector.sh
- source ./bin/conf.sh
- make riscv-formal-clean riscv-formal-prepare
- export PATH=$YOSYS_ROOT:$PATH
- make -C work/core/riscv-formal insn_add_ch0
- make -C work/core/riscv-formal insn_beq_ch0
- make -C work/core/riscv-formal insn_ld_ch0
- make -C work/core/riscv-formal insn_sd_ch0
- name: "Synthesis - Core"
script:
- source ./bin/install-yosys.sh
- source ./bin/conf.sh
- make synthesise-cmos
- name: "Benchmarks - Embench IoT"
script:
- source ./bin/install-verilator.sh
- source ./bin/install-toolchain.sh
- source ./bin/conf.sh
- make build-ccx_top
- make build-embench-binaries
- make build-embench-targets
- make -j 2 run-embench-targets
- grep -rn ">> Finished after .* clock cycles" work/embench/src/ | sort
cache :
directories:
- ~/tools/verilator
- ~/tools/yosys
- ~/tools/symbiyosys
- ~/tools/boolector
- ~/tools/riscv64-unknown-elf
|
<filename>software/mesa/appveyor.yml<gh_stars>1-10
# http://www.appveyor.com/docs/appveyor-yml
#
# To setup AppVeyor for your own personal repositories do the following:
# - Sign up
# - Add a new project
# - Select Git and fill in the Git clone URL
# - Setup a Git hook as explained in
# https://github.com/appveyor/webhooks#installing-git-hook
# - Check 'Settings > General > Skip branches without appveyor.yml'
# - Check 'Settings > General > Rolling builds'
# - Setup the global or project notifications to your liking
#
# Note that kicking (or restarting) a build via the web UI will not work, as it
# will fail to find appveyor.yml . The Git hook is the most practical way to
# kick a build.
#
# See also:
# - http://help.appveyor.com/discussions/problems/2209-node-grunt-build-specify-a-project-or-solution-file-the-directory-does-not-contain-a-project-or-solution-file
# - http://help.appveyor.com/discussions/questions/1184-build-config-vs-appveyoryaml
version: '{build}'
branches:
except:
- /^travis.*$/
# Don't download the full Mesa history to speed up cloning. However the clone
# depth must not be too small, otherwise builds might fail when lots of patches
# are committed in succession, because the desired commit is not found on the
# truncated history.
#
# See also:
# - https://www.appveyor.com/blog/2014/06/04/shallow-clone-for-git-repositories
clone_depth: 100
cache:
- win_flex_bison-2.4.5.zip
- llvm-3.3.1-msvc2013-mtd.7z
os: Visual Studio 2013
environment:
WINFLEXBISON_ARCHIVE: win_flex_bison-2.4.5.zip
LLVM_ARCHIVE: llvm-3.3.1-msvc2013-mtd.7z
install:
# Check pip
- python --version
- python -m pip --version
# Install Mako
- python -m pip install --egg Mako
# Install pywin32 extensions, needed by SCons
- python -m pip install pypiwin32
# Install SCons
- python -m pip install --egg scons==2.4.1
- scons --version
# Install flex/bison
- if not exist "%WINFLEXBISON_ARCHIVE%" appveyor DownloadFile "http://downloads.sourceforge.net/project/winflexbison/%WINFLEXBISON_ARCHIVE%"
- 7z x -y -owinflexbison\ "%WINFLEXBISON_ARCHIVE%" > nul
- set Path=%CD%\winflexbison;%Path%
- win_flex --version
- win_bison --version
# Download and extract LLVM
- if not exist "%LLVM_ARCHIVE%" appveyor DownloadFile "https://people.freedesktop.org/~jrfonseca/llvm/%LLVM_ARCHIVE%"
- 7z x -y "%LLVM_ARCHIVE%" > nul
- mkdir llvm\bin
- set LLVM=%CD%\llvm
build_script:
- scons -j%NUMBER_OF_PROCESSORS% MSVC_VERSION=12.0 llvm=1
after_build:
- scons -j%NUMBER_OF_PROCESSORS% MSVC_VERSION=12.0 llvm=1 check
# It's possible to setup notification here, as described in
# http://www.appveyor.com/docs/notifications#appveyor-yml-configuration , but
# doing so would cause the notification settings to be replicated across all
# repos, which is most likely undesired. So it's better to rely on the
# Appveyor global/project notification settings.
|
<filename>ips_list.yml
common_cells:
commit: v1.20.0
group: pulp-platform
common_verification:
commit: v0.1.1
group: pulp-platform
|
<reponame>zstars/weblabdeusto<gh_stars>0
language: python
services:
- redis-server
python: "2.7"
install:
- pip install -r server/src/requirements.txt -r server/src/requirements_recommended.txt -r server/src/requirements_suggested.txt -r server/src/requirements_testing.txt
- python server/src/develop.py --deploy-test-db --db-engine=mysql --db-create-db --db-admin-user=root --db-admin-passwd=""
- npm install -g mocha
- cd experiments/unmanaged/http/nodejs; npm install; cd ../../../../;
script:
- cd server/src; python develop.py; cd ../..;
- cd experiments/unmanaged/http/nodejs/; mocha; cd ../../../../;
|
# Human readable task name
name: pythia
# Long form description.
description: |+
Yet another oracle, but the queries are costly and limited so be frugal with them.
# The flag
flag: CTF{gCm_1s_n0t_v3ry_r0bust_4nd_1_sh0uld_us3_s0m3th1ng_els3_h3r3}
# Task category. (one of hw, crypto, pwn, rev, web, net, misc)
category: crypto
# === the fields below will be filled by SRE or automation ===
# Task label
label: ''
# URL for web challenges
link: ''
# host/port for non-web challenges
host: 'pythia.2021.ctfcompetition.com 1337'
# the URL for attachments, to be filled in by automation
attachment: ''
# is this challenge released? Will be set by SREs
visible: false
|
language: python
dist: xenial
python: "3.6"
install:
# Get Migen / LiteX / Cores
- wget https://raw.githubusercontent.com/enjoy-digital/litex/master/litex_setup.py
- python3 litex_setup.py init install
# Install LiteX-Boards
- python3 setup.py develop
before_script:
# Get RISC-V toolchain
- wget https://static.dev.sifive.com/dev-tools/riscv64-unknown-elf-gcc-8.1.0-2019.01.0-x86_64-linux-ubuntu14.tar.gz
- tar -xvf riscv64-unknown-elf-gcc-8.1.0-2019.01.0-x86_64-linux-ubuntu14.tar.gz
- export PATH=$PATH:$PWD/riscv64-unknown-elf-gcc-8.1.0-2019.01.0-x86_64-linux-ubuntu14/bin/
script: python setup.py test
|
<reponame>c-rus/legoHDL<filename>.github/workflows/test.yml<gh_stars>1-10
name: test
on: [push]
jobs:
test:
strategy:
fail-fast: false
matrix:
python-version: ['3.5', '3.x']
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
#setup python
- uses: actions/[email protected]
with:
python-version: ${{ matrix.python-version }}
- run: python -c "import sys; print(sys.version)"
#install and configure legoHDL
- name: Install legoHDL
run: pip install .
#perform initial legoHDL setup
- name: Setup legoHDL
run: printf 'y\n\nchase\ngedit\n~/testworkspace/\ny\n' | legohdl
#run the testing script
- name: Test legoHDL
run: python ./test/legohdl/test.py
#collect testing data/results
- name: Archive test results
uses: actions/upload-artifact@v2
with:
name: unit-test-report
path: output/
|
name: sim-compile
parameters:
tool: "XCELIUM"
commands:
- bash run_sim.sh
inputs:
- design.v
- header
outputs:
- sim.log
postconditions:
- assert File( 'outputs/sim.log' ) # must exist
- assert 'Error,' not in File( 'outputs/sim.log' )
- assert '*E,' not in File( 'outputs/sim.log' )
|
# file: src_files.yml
fpu_interco:
vlog_opts: [
-L fpnew_lib,
]
incdirs: [
../cv32e40p/rtl/include,
]
files: [
../cv32e40p/rtl/include/riscv_defines.sv,
FP_WRAP/fp_iter_divsqrt_msv_wrapper_2_STAGE.sv,
FP_WRAP/fpnew_wrapper.sv,
RTL/AddressDecoder_Req_FPU.sv,
RTL/AddressDecoder_Resp_FPU.sv,
RTL/ArbitrationTree_FPU.sv,
RTL/FanInPrimitive_Req_FPU.sv,
RTL/FanInPrimitive_Resp_FPU.sv,
RTL/optimal_alloc.sv,
RTL/FPU_clock_gating.sv,
RTL/LFSR_FPU.sv,
RTL/RequestBlock_FPU.sv,
RTL/ResponseBlock_FPU.sv,
RTL/ResponseTree_FPU.sv,
RTL/RR_Flag_Req_FPU.sv,
RTL/shared_fpu_cluster.sv,
RTL/fpu_demux.sv,
RTL/XBAR_FPU.sv,
]
|
# repo: mundaym/ibex
- test: riscv_instr_cov_debug_test
description: >
Functional coverage debug test, this is not a functional test to the core.
iterations: 1
gen_test: riscv_instr_cov_debug_test
no_iss: 1
no_gcc: 1
no_post_compare: 1
- test: riscv_instr_cov_test
description: >
Parse the instruction information from the CSV trace log, sample functional
coverage from the instruction trace.
iterations: 1
gen_test: riscv_instr_cov_test
no_iss: 1
no_gcc: 1
no_post_compare: 1
|
name: eth_top
clock_port: wb_clk_i
verilog:
- eth_clockgen.v
- eth_cop.v
- eth_crc.v
- eth_defines.v
- eth_fifo.v
- eth_maccontrol.v
- eth_macstatus.v
- eth_miim.v
- eth_outputcontrol.v
- eth_random.v
- eth_receivecontrol.v
- eth_register.v
- eth_registers.v
- eth_rxaddrcheck.v
- eth_rxcounters.v
- eth_rxethmac.v
- eth_rxstatem.v
- eth_shiftreg.v
- eth_spram_256x32.v
- eth_top.v
- eth_transmitcontrol.v
- eth_txcounters.v
- eth_txethmac.v
- eth_txstatem.v
- eth_wishbone.v
- timescale.v
- xilinx_dist_ram_16x32.v
|
name: CI
on:
push:
paths-ignore:
- .git*
- scripts/**
- README.md
pull_request:
jobs:
all-build:
strategy:
matrix:
platform: [ubuntu-20.04, macos-latest]
rust: [stable]
runs-on: ${{ matrix.platform }}
steps:
- uses: actions/checkout@v2
with:
submodules: recursive
- name: Installing Verilator
if: ${{ startsWith(matrix.platform, 'mac') }}
run: brew install verilator
- name: Installing Verilator
if: ${{ startsWith(matrix.platform, 'ubuntu') }}
run: sudo apt-get install verilator
- name: rustup
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: ${{ matrix.rust }}
override: true
components: rustfmt, clippy
- name: check cargo build
uses: actions-rs/cargo@v1
with:
command: build
args: --all
- name: check cargo test
uses: actions-rs/cargo@v1
with:
command: test
args: --all
- name: check cargo fmt
uses: actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
- name: check cargo clippy
uses: actions-rs/cargo@v1
with:
command: clippy
# args: -- -D warnings
|
# stars: 10-100
name: verilator-tests
on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-18.04
steps:
- uses: actions/checkout@v2
with:
lfs: true
- name: Install Verilator
run: |
wget https://github.com/sifive/verilator/releases/download/4.036-0sifive2/verilator_4.036-0sifive2_amd64.deb
sudo dpkg -i verilator_4.036-0sifive2_amd64.deb
rm -f verilator_4.036-0sifive2_amd64.deb
- name: Print Verilator version
run: verilator --version
- name: Run tests using Verilator
run: |
cd sim
./run.sh
|
#
# List of IPs and relative branch/commit-hash/tag.
# Uses the YAML syntax.
#
# Examples:
#
# or10n:
# commit: tags/PULP3_final
# domain: [cluster]
# udma:
# commit: 62b10440
# domain: [soc]
# axi_slice:
# commit: tags/pulpissimo-v1.0
# domain: [soc,cluster]
# If a *tag* or *commit* is referenced, the IP will be in a
# state of DETACHED HEAD. Before committing any additional
# work, make sure to checkout a branch.
#
L2_tcdm_hybrid_interco:
commit: tags/pulpissimo-v1.0
adv_dbg_if:
  commit: <PASSWORD>  # NOTE(review): anonymization artifact — restore the original commit hash/tag here
apb/apb2per:
commit: tags/pulpissimo-v1.0
apb/apb_adv_timer:
commit: tags/pulpissimo-v1.0
apb/apb_fll_if:
commit: tags/pulpissimo-v1.0
apb/apb_gpio:
commit: tags/pulpissimo-v1.0
apb/apb_node:
commit: tags/pulpissimo-v1.0
apb_interrupt_cntrl:
commit: tags/pulpissimo-v1.0
axi/axi_node:
commit: tags/pulpissimo-v1.0
axi/axi_slice:
commit: tags/pulpissimo-v1.0
axi/axi_slice_dc:
commit: 5f889f887e58f6d5dadd79616b16e1a63381d569
timer_unit:
commit: tags/pulpissimo-v1.0
common_cells:
commit: master
fpu:
  commit: <PASSWORD>  # NOTE(review): anonymization artifact — restore the original commit hash/tag here
jtag_pulp:
commit: tags/pulpissimo-v1.0
riscv:
commit: tags/pulpissimo-v2.0.0
zero-riscy:
commit: tags/pulpissimo-v1.0.1
scm:
commit: tags/pulpissimo-v1.0
generic_FLL:
commit: tags/pulpissimo-v1.0
tech_cells_generic:
commit: tags/pulpissimo-v1.0
udma/udma_core:
commit: tags/pulpissimo-v1.0.2
udma/udma_uart:
commit: tags/pulpissimo-v1.0.1
udma/udma_i2c:
commit: tags/pulpissimo-v1.0.2
udma/udma_i2s:
commit: tags/pulpissimo-v1.0
udma/udma_qspi:
#commit: tags/pulpissimo-v1.0
commit: master
udma/udma_sdio:
commit: tags/pulpissimo-v1.0.1
udma/udma_camera:
commit: tags/pulpissimo-v1.0
hwpe-mac-engine:
commit: f1d0b72
|
name: glb_tile
commands:
- bash get_glb_outputs.sh
inputs:
- design.v
outputs:
- glb_tile_tt.lib
- glb_tile.lef
- glb_tile.gds
- glb_tile.lvs.v
- glb_tile.vcs.v
- glb_tile.sdf
- glb_tile_sram.spi
- glb_tile_sram.v
- glb_tile_sram_pwr.v
- glb_tile_sram_tt.lib
- glb_tile_sram_ff.lib
- glb_tile_sram_tt.db
postconditions:
- assert File( 'outputs/glb_tile_tt.lib' ) # must exist
- assert File( 'outputs/glb_tile.lef' ) # must exist
- assert File( 'outputs/glb_tile.gds' ) # must exist
- assert File( 'outputs/glb_tile.lvs.v' ) # must exist
- assert File( 'outputs/glb_tile.vcs.v' ) # must exist
- assert File( 'outputs/glb_tile.sdf' ) # must exist
- assert File( 'outputs/glb_tile_sram.spi' ) # must exist
- assert File( 'outputs/glb_tile_sram.v' ) # must exist
- assert File( 'outputs/glb_tile_sram_pwr.v' ) # must exist
- assert File( 'outputs/glb_tile_sram_tt.lib' ) # must exist
- assert File( 'outputs/glb_tile_sram_ff.lib' ) # must exist
- assert File( 'outputs/glb_tile_sram_tt.db' ) # must exist
|
# file: .github/labeler.yml
# See https://github.com/actions/labeler#common-examples for defining patterns.
# The globs use "minimatch" syntax found at https://github.com/isaacs/minimatch
#
# WARNING: Due to this file being yaml, any string starting with `*` must be
# wrapped in quotes.
# Tools
ABC:
- abc_with_bb_support/*
- abc_with_bb_support/**/*
- abc/*
- abc/**/*
ACE2:
- ace2/*
- ace2/**/*
blifexplorer:
- blifexplorer/*
- blifexplorer/**/*
Odin:
- ODIN_II/*
- ODIN_II/**/*
- odin2_helper/*
- odin2_helper/**/*
VPR:
- vpr/*
- vpr/**/*
# Libraries
libarchfpga:
- libs/libarchfpga/*
- libs/libarchfpga/**/*
libeasygl:
- libs/libeasygl/*
- libs/libeasygl/**/*
liblog:
- libs/liblog/*
- libs/liblog/**/*
libpugiutil:
- libs/libpugiutil/*
- libs/libpugiutil/**/*
libvtrutil:
- libs/libvtrutil/*
- libs/libvtrutil/**/*
external_libs:
- libs/EXTERNAL/*
- libs/EXTERNAL/**/*
# General areas
docs:
- docs/*
- docs/**/*
- "*README*"
- "*.md"
- tutorial
- "*.rst"
infra:
- .github/*
- .github/**/*
- Dockerfile
- "*docker*"
build:
- Makefile
- "*.make"
- CMakeLists.txt
- cmake
tests:
- "*_test.py"
- "*test*"
- "*TESTS*"
scripts:
- scripts
- "*.pl"
- "*.py"
- "*.sh"
VTR Flow:
- vtr_flow
# Tag pull requests with the languages used to make it easy to see what is
# being used.
lang-hdl:
- "*.v"
- "*.sv"
lang-cpp:
- "*.c*"
- "*.h"
lang-perl:
- "*.pl"
- "*perl*"
lang-python:
- "*.py"
lang-shell:
- "*.sh"
lang-netlist:
- "*.blif"
- "*.eblif"
- "*.edif"
- "*.vqm"
lang-make:
- "*.make"
- Makefile
- CMakeLists.txt
|
language: go
go:
- 1.11.x
script:
- GOOS=linux go build
- GOOS=darwin go build
- GOOS=freebsd go build
- go test -v -coverprofile=coverage.txt -covermode=atomic
- go test -v -race
after_success:
- bash <(curl -s https://codecov.io/bash)
|
# repo: SubjeBilisim/anasymod
name: Regression
on:
push:
pull_request:
workflow_dispatch:
schedule:
    - cron: '0 11 * * *'  # daily at 11:00 UTC; GitHub cron does not support Quartz-style '?'
jobs:
linux:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Set up Python 3.7
uses: actions/setup-python@v2
with:
python-version: 3.7
- name: Install dependencies
run: sudo apt-get install iverilog
- name: Run regression test
run: source regress.sh
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
- name: SonarCloud Scan
uses: sonarsource/sonarcloud-github-action@master
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
mac:
runs-on: macos-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Python 3.7
uses: actions/setup-python@v2
with:
python-version: 3.7
- name: Install dependencies
run: |
brew install icarus-verilog
- name: Run regression test
run: source regress.sh
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
windows:
runs-on: windows-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Python 3.7
uses: actions/setup-python@v2
with:
python-version: 3.7
- name: Install dependencies
run: |
curl -L https://github.com/sgherbst/anasymod/releases/download/bogus/iverilog-v11-20201123-x64.tar.gz > iverilog-v11-20201123-x64.tar.gz
tar xzvf iverilog-v11-20201123-x64.tar.gz
shell: bash
- name: Run regression test
run: |
export ICARUS_INSTALL_PATH=`realpath iverilog`
echo $ICARUS_INSTALL_PATH
source regress.sh
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
shell: bash
- name: Archive debugging artifacts
if: always()
uses: actions/upload-artifact@v2
with:
name: debug-artifacts-windows
path: |
**/models/default/main/*.*v
|
# repo: diorga/snitch
# Copyright 2020 ETH Zurich and University of Bologna.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
site_name: Snitch
theme:
name: material
icon:
repo: fontawesome/brands/github
repo_url: https://github.com/pulp-platform/snitch
repo_name: pulp-platform/snitch
markdown_extensions:
- admonition
- def_list
- pymdownx.highlight
- pymdownx.superfences
- pymdownx.tabbed
- pymdownx.emoji:
emoji_index: !!python/name:materialx.emoji.twemoji
emoji_generator: !!python/name:materialx.emoji.to_svg
nav:
- Home: index.md
- User Guide:
- Getting Started: ug/getting_started.md
- Docker: ug/docker.md
- Directory Structure: ug/directory_structure.md
- Documentation: ug/documentation.md
- Systems:
- Snitch Cluster:
- Guide: ug/snitch_cluster.md
- Schema: schema-doc/snitch_cluster.md
- Reference Manual:
- Snitch: rm/snitch/index.md
- Snitch Cluster: rm/snitch_cluster/index.md
- Reqrsp Interface: rm/reqrsp_interface/index.md
- Custom Instructions: rm/custom_instructions.md
# - Solder: rm/solder.md
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.