Search is not available for this dataset
content
stringlengths 0
376M
|
---|
<reponame>suarezvictor/litex
name: ci
on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-18.04
steps:
# Checkout Repository
- name: Checkout
uses: actions/checkout@v2
# Install Tools
- name: Install Tools
run: |
sudo apt-get install wget build-essential python3 ninja-build
sudo apt-get install verilator libevent-dev libjson-c-dev
pip3 install setuptools
pip3 install requests
pip3 install pexpect
pip3 install meson
pip3 install amaranth-yosys
# Install (n)Migen / LiteX / Cores
- name: Install LiteX
run: |
wget https://raw.githubusercontent.com/enjoy-digital/litex/master/litex_setup.py
python3 litex_setup.py --config=full --init --install --user
# Install GCC Toolchains
- name: Install GCC Toolchains
run: |
wget https://raw.githubusercontent.com/enjoy-digital/litex/master/litex_setup.py
python3 litex_setup.py --gcc=riscv
sudo mkdir /usr/local/riscv
sudo cp -r $PWD/../riscv64-*/* /usr/local/riscv
python3 litex_setup.py --gcc=openrisc
sudo mkdir /usr/local/openrisc
sudo cp -r $PWD/../openrisc-*/* /usr/local/openrisc
# Install Project
- name: Install Project
run: python3 setup.py develop --user
# Test
- name: Run Tests
run: |
export PATH=/usr/local/riscv/bin:$PATH
export PATH=/usr/local/openrisc/bin:$PATH
python3 setup.py test
|
---
algorithm:
class: Spea2
population_size: 400
max_archive_size: 300
duplicate_elimination: false
shorten_archive_individual: true
probabilities:
crossover: 0.5
mutation: 0.01
injection: 0.9
init:
method: ramped # grow or full or ramped
sensible_depth: 7
inject:
method: grow # grow or full or random
sensible_depth: 7
termination:
max_steps: 1000
on_individual: stopping_condition
grammar:
class: Abnf::File
filename: sample/vhdl_design/grammar.abnf
mapper:
class: DepthFirst
wraps_to_fail: 3
track_support_on: true
selection:
class: Tournament
tournament_size: 2
selection_rank: # do not change
class: Spea2Ranking
crossover:
class: CrossoverLHS
mutation:
class: MutationStructural
store:
class: Store
filename: ./vhdl_spea2_lhsc.store
report:
class: PopulationReport
individual:
class: PipedIndividual
shorten_chromozome: false
_weak_pareto:
:fitness: maximize
:used_length: minimize
_pipe_output:
- :fitness: to_i
_thresholds:
:fitness: 16
:used_length: 70
evaluator:
class: WorkPipes
commands:
- 'ruby sample/vhdl_design/adder_pipe.rb 1'
- 'ruby sample/vhdl_design/adder_pipe.rb 2'
|
name: Deploy to PyPI
on:
push:
tags:
- v*
jobs:
build:
runs-on: ubuntu-latest
if: "!contains(github.event.head_commit.message, 'skip ci')"
steps:
- uses: actions/checkout@v2
- name: Checkout submodules
shell: bash
run: |
auth_header="$(git config --local --get http.https://github.com/.extraheader)"
git submodule sync --recursive
git -c "http.extraheader=$auth_header" -c protocol.version=2 submodule update --init --force --recursive --depth=1
- uses: actions/setup-python@v2
with:
python-version: '3.8'
- name: Install verilator
shell: bash
run: |
sudo apt install -y verilator libgmp-dev libmpfr-dev libmpc-dev
verilator --version
- name: Install Python packages
shell: bash -l {0}
run: |
pip install "pytest<6"
pip install pytest-cov pytest-pycodestyle
pip install "mantle>=2.0.0" # for tests.common
pip install vcdvcd decorator kratos
pip install .
- name: Pytest
shell: bash -l {0}
run: |
pytest --pycodestyle --cov-report=xml --cov=fault tests/ -v -r s
- name: Coverage
shell: bash -l {0}
run: |
bash <(curl -s https://codecov.io/bash)
- name: Install deploy packages
shell: bash -l {0}
run: |
pip install twine
- name: Upload to PyPI
shell: bash -l {0}
run: |
source .travis/deploy.sh
env:
PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: face re-identification model (light-weighted) in tracking scenario.
input size: 80*80
float ops: 90M
task: face reid
framework: pytorch
prune: 'no'
version: 1.4
files:
- name: pt_facereid-small_80_80_90M_1.4
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_facereid-small_80_80_90M_1.4.zip
checksum: f6714310a322c1a07baa7cdc6867d9a8
- name: facereid-small_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=facereid-small_pt-zcu102_zcu104_kv260-r1.4.1.tar.gz
checksum: ba9e278f0913235a6b56d42ecc63084c
- name: facereid-small_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=facereid-small_pt-vck190-r1.4.1.tar.gz
checksum: fad7137675aa04a6405fde744f8cf83a
- name: facereid-small_pt
type: xmodel
board: u50-DPUCAHX8H & u50lv-DPUCAHX8H & u280-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=facereid-small_pt-u50-u50lv-u280-DPUCAHX8H-r1.4.1.tar.gz
checksum: b2235dce7aaf82e8af7e9cbb41548d45
- name: facereid-small_pt
type: xmodel
board: u50-DPUCAHX8L & u50lv-DPUCAHX8L & u280-DPUCAHX8L
download link: https://www.xilinx.com/bin/public/openDownload?filename=facereid-small_pt-u50-u50lv-u280-DPUCAHX8L-r1.4.1.tar.gz
checksum: 2629b8e6e7272810403876a65bfe725d
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
<filename>.github/workflows/coverage.yml
name: Coverage
on: [push]
jobs:
build:
strategy:
matrix:
os: [ubuntu-latest]
python-version: [3.7]
rust-version: [stable]
runs-on: ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python-version }}
- name: Set up Rust ${{ matrix.rust-version }}
uses: hecrj/setup-rust-action@v1
with:
rust-version: ${{ matrix.rust-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
- name: Install package
run: |
pip install -e .
- name: Test with pytest while generating coverage information
run: |
pip install pytest pytest-cov
pytest --cov-report=xml --cov=svinst tests -v -r s
- name: Upload coverage
uses: codecov/codecov-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }}
|
<reponame>hito0512/Vitis-AI
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: SESR-S for Single Image Super-Resolution.
input size: 360*640
float ops: 7.48G
task: super-resolution
framework: pytorch
prune: 'no'
version: 2.0
files:
- name: pt_SESR-S_DIV2K_360_640_7.48G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_SESR-S_DIV2K_360_640_7.48G_2.0.zip
checksum: bf5ab39c06d28e71511c7296e5f9c009
- name: SESR_S_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=SESR_S_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: 6991f69e9ced898f2fbd40167c8085e2
- name: SESR_S_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=SESR_S_pt-vck190-r2.0.0.tar.gz
checksum: 0910332e2b4545297c903e9606c7434a
- name: SESR_S_pt
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=SESR_S_pt-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: 1cb7147fe0b84016d20b0249e72da0d6
- name: SESR_S_pt
type: xmodel
board: vck50008pe-DPUCVDX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=SESR_S_pt-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz
checksum: 121ff0cbbcb37b8a7ba53e4ee4f709fb
- name: SESR_S_pt
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=SESR_S_pt-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: 495a10ace5bafc5ff786735a1da570be
- name: SESR_S_pt
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=SESR_S_pt-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: 3867a04b035c730f77cfc4a5b2638385
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
# Settings for the saed32 technology to be overridden by the project.
technology.saed32:
# Path to the directory which contains all the saed32 tarballs (e.g. SAED_EDKxxxxxxx_CORE_LVT_v_xxxxxxxx.tar.gz)
tarball_dir: ""
# Set some defaults for this technology.
# For quick performance evaluations, use the typical voltage+temperature corners.
# Nominal voltage for 32nm technology is 1.05V and is the only voltage provided
# for normal SRAMs.
# TODO: add default temperature corners.
vlsi:
# Technology dimension
# TODO: add more technology files to make some CAD tools happy with the actual number of 32nm
core.node: 33
inputs:
# Supply voltages.
supplies:
VDD: "1.05 V"
GND: "0 V"
|
---
# An employee record
name: Example Developer
#RRD DB Loc/Name
rrddb: "/var/www/html/wundergrounddata.rrd"
csvdatafile: "/usr/local/wun/weatherdata.csv"
graph-location: "/var/www/html/wun/"
photo-location: "/var/www/html/camera/"
use-wun: true
wun-url: "http://api.wunderground.com/api/<api_key>/conditions/q/zmw:<zmw>.json"
wun-almanac-url: "http://api.wunderground.com/api/<api_key>/almanac/q/<state>/<city>.json"
wun-astronomy-url: "http://api.wunderground.com/api/<api_key>/astronomy/q/<state>/<city>.json"
wun-api-keyfile: "/etc/wunapikey.txt"
wun-api-location-city: Austin
wun-api-location-state: TX
#z - zip code #m - magic station No. #w - wmo identifier
wun-api-location-zmw: 78701.1.99999
archive-location: "/var/www/html/archive/"
history-file: "almanac_current.json"
astronomy-file: "astronomy_current.json"
|
<gh_stars>0
name: global_controller
commands:
- bash get_global_controller_outputs.sh
inputs:
- design.v
outputs:
- global_controller_tt.lib
- global_controller.lef
- global_controller.gds
- global_controller.vcs.v
- global_controller.sdf
- global_controller.lvs.v
postconditions:
- assert File( 'outputs/global_controller_tt.lib' ) # must exist
- assert File( 'outputs/global_controller.lef' ) # must exist
- assert File( 'outputs/global_controller.gds' ) # must exist
- assert File( 'outputs/global_controller.vcs.v' ) # must exist
- assert File( 'outputs/global_controller.sdf' ) # must exist
- assert File( 'outputs/global_controller.lvs.v' ) # must exist
|
# Copyright 2020 ETH Zurich and University of Bologna.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
name: publish-docs
on:
push:
branches:
- master
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.x
- run: pip install -r docs/requirements.txt
- run: mkdocs gh-deploy --force
|
<gh_stars>10-100
ignore:
- "3rdparty/**/*"
# failure.c generates libtenyrfailure, which is meant for *stimulating*
# coverage, but should not itself contribute to the number of
# lines/branches/functions covered.
- src/devices/failure.c
coverage:
range: 75..95
precision: 1
|
labelName: waiting-for-user-information
labelColor: f7c6c7
commentText: >
Please follow the [issue template](https://github.com/Glavin001/atom-beautify/blob/master/ISSUE_TEMPLATE.md) provided.
More specifically, update the original comment for this issue by adding a link to the
required [debug.md](https://github.com/Glavin001/atom-beautify/blob/master/docs/troubleshooting.md#how-to-create-debugmd-gist)
gist which includes debugging information that answers our most commonly asked questions.
Thank you.
checkCheckboxes: true
keywords:
- gist.github.com
|
---
MainSourceFile: 'Utilities/ReleaseScripts/test/test-clang-tidy.cc'
Diagnostics:
- DiagnosticName: modernize-use-nullptr
DiagnosticMessage:
Message: use nullptr
FilePath: 'Utilities/ReleaseScripts/test/test-clang-tidy.cc'
FileOffset: 69
Replacements:
- FilePath: 'Utilities/ReleaseScripts/test/test-clang-tidy.cc'
Offset: 69
Length: 1
ReplacementText: nullptr
- DiagnosticName: modernize-use-nullptr
DiagnosticMessage:
Message: use nullptr
FilePath: 'Utilities/ReleaseScripts/test/test-clang-tidy.cc'
FileOffset: 206
Replacements:
- FilePath: 'Utilities/ReleaseScripts/test/test-clang-tidy.cc'
Offset: 206
Length: 1
ReplacementText: nullptr
- DiagnosticName: modernize-use-nullptr
DiagnosticMessage:
Message: use nullptr
FilePath: 'Utilities/ReleaseScripts/test/test-clang-tidy.cc'
FileOffset: 235
Replacements:
- FilePath: 'Utilities/ReleaseScripts/test/test-clang-tidy.cc'
Offset: 235
Length: 1
ReplacementText: nullptr
- DiagnosticName: modernize-use-override
DiagnosticMessage:
Message: 'prefer using ''override'' or (rarely) ''final'' instead of ''virtual'''
FilePath: 'Utilities/ReleaseScripts/test/test-clang-tidy.cc'
FileOffset: 385
Replacements:
- FilePath: 'Utilities/ReleaseScripts/test/test-clang-tidy.cc'
Offset: 377
Length: 8
ReplacementText: ''
- FilePath: 'Utilities/ReleaseScripts/test/test-clang-tidy.cc'
Offset: 399
Length: 0
ReplacementText: ' override'
- DiagnosticName: modernize-use-override
DiagnosticMessage:
Message: 'prefer using ''override'' or (rarely) ''final'' instead of ''virtual'''
FilePath: 'Utilities/ReleaseScripts/test/test-clang-tidy.cc'
FileOffset: 415
Replacements:
- FilePath: 'Utilities/ReleaseScripts/test/test-clang-tidy.cc'
Offset: 403
Length: 8
ReplacementText: ''
- FilePath: 'Utilities/ReleaseScripts/test/test-clang-tidy.cc'
Offset: 427
Length: 0
ReplacementText: ' override'
...
|
<reponame>f110/wing
webhook_listener: :5000
build_namespace: bot
commit_author: cluster-loadmaster
commit_email: <EMAIL>
github_token_file: /home/dexter/dev/src/github.com/f110/k8s-cluster-maintenance-bot/github-token
app_id: 51841
installation_id: 6365451
app_private_key_file: /home/dexter/dev/src/github.com/f110/k8s-cluster-maintenance-bot/privatekey.pem
private_key_secret_name: github-private-key
storage_host: bot-data-hl-svc.bot.svc.cluster.local:9000
storage_token_secret_name: object-storage-token
artifact_bucket: build-artifact
host_aliases:
- hostnames: ["registry.f110.dev", "registry.storage.x.f110.dev"]
ip: 192.168.100.132
allow_repositories:
- f110/bot-staging
safe_mode: true # not apply changes actually
|
apiVersion: miniocontroller.min.io/v1beta1
kind: MinIOInstance
metadata:
name: object-storage
labels:
app: minio
spec:
metadata:
labels:
app: minio
annotations:
prometheus.io/path: /minio/prometheus/metrics
prometheus.io/port: "9000"
prometheus.io/scrape: "true"
image: minio/minio:RELEASE.2021-04-06T23-11-00Z
credsSecret:
name: object-storage-token
zones:
- name: zone-0
servers: 1
podManagementPolicy: Parallel
env:
- name: MINIO_BROWSER
value: "on"
- name: MINIO_PROMETHEUS_AUTH_TYPE
value: public
resources:
requests:
memory: 512Mi
cpu: 100m
liveness:
httpGet:
path: /minio/health/live
port: 9000
initialDelaySeconds: 30
periodSeconds: 20
readiness:
httpGet:
path: /minio/health/ready
port: 9000
initialDelaySeconds: 30
periodSeconds: 20
volumeClaimTemplate:
metadata:
name: data
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
---
apiVersion: v1
kind: Secret
metadata:
name: object-storage-token
type: Opaque
stringData:
accesskey: 22OekI3HF7n4
secretkey: <KEY>
|
run_sim_udp:vivado2018.3:modelsim10.6c:
extends: .template_base
image: ${IPBUS_DOCKER_REGISTRY}/ipbus-fw-dev-centos7:2021-04-26__ipbbdev-2021f_uhal2.8.0
tags:
- docker
- docker-cap-net-admin
- docker-device-net-tun
- xilinx-tools
stage: quick_checks
variables:
VIVADO_VERSION: "2018.3"
IPBB_SIMLIB_BASE: /scratch/xilinx-simlibs
script:
- export PATH=/software/mentor/modelsim_10.6c/modeltech/bin:$PATH
- export PATH=/opt/cactus/bin/uhal/tests:$PATH
- export LD_LIBRARY_PATH=/opt/cactus/lib:$LD_LIBRARY_PATH
- ipbb init work_area
- cd work_area
- ln -s ${CI_PROJECT_DIR} src/ipbus-firmware
- /${CI_PROJECT_DIR}/work_area/src/ipbus-firmware/tests/ci/test-run-sim-udp.sh sim
run_sim:vivado2018.3:modelsim10.6c:
extends: .template_base
image: ${IPBUS_DOCKER_REGISTRY}/ipbus-fw-dev-centos7:2021-04-26__ipbbdev-2021f_uhal2.8.0
tags:
- docker
- docker-cap-net-admin
- docker-device-net-tun
- xilinx-tools
stage: quick_checks
variables:
VIVADO_VERSION: "2018.3"
IPBB_SIMLIB_BASE: /scratch/xilinx-simlibs
script:
- sudo openvpn --mktun --dev tap0
- sudo /sbin/ifconfig tap0 up 192.168.201.1
- sudo chmod a+rw /dev/net/tun
- export PATH=/software/mentor/modelsim_10.6c/modeltech/bin:$PATH
- ipbb init work_area
- cd work_area
- ln -s ${CI_PROJECT_DIR} src/ipbus-firmware
- /${CI_PROJECT_DIR}/work_area/src/ipbus-firmware/tests/ci/test-run-sim.sh sim
run_ram_slaves_testbench_sim:vivado2018.3:modelsim10.6c:
extends: .template_base
image: ${IPBUS_DOCKER_REGISTRY}/ipbus-fw-dev-centos7:2021-04-26__ipbbdev-2021f_uhal2.8.0
tags:
- docker
- docker-cap-net-admin
- docker-device-net-tun
- xilinx-tools
stage: quick_checks
variables:
VIVADO_VERSION: "2018.3"
IPBB_SIMLIB_BASE: /scratch/xilinx-simlibs
script:
- sudo openvpn --mktun --dev tap0
- sudo /sbin/ifconfig tap0 up 192.168.201.1
- sudo chmod a+rw /dev/net/tun
- export PATH=/software/mentor/modelsim_10.6c/modeltech/bin:$PATH
- ipbb init work_area
- cd work_area
- ln -s ${CI_PROJECT_DIR} src/ipbus-firmware
- ipbb proj create sim ram_slvs_tb ipbus-firmware:tests/ram_slaves top_sim.dep
- cd proj/ram_slvs_tb
- ipbb sim setup-simlib
- ipbb sim ipcores
- ipbb sim fli-udp
- ipbb sim generate-project
- ./run_sim -c work.top -do 'run 1ms' -do 'quit'
run_ctr_slaves_testbench_sim:vivado2018.3:modelsim10.6c:
extends: .template_base
image: ${IPBUS_DOCKER_REGISTRY}/ipbus-fw-dev-centos7:2021-04-26__ipbbdev-2021f_uhal2.8.0
tags:
- docker
- xilinx-tools
stage: quick_checks
variables:
VIVADO_VERSION: "2018.3"
IPBB_SIMLIB_BASE: /scratch/xilinx-simlibs
script:
- export PATH=/software/mentor/modelsim_10.6c/modeltech/bin:$PATH
- export LD_LIBRARY_PATH=/opt/cactus/lib:$LD_LIBRARY_PATH
- ipbb init work_area
- cd work_area
- ln -s ${CI_PROJECT_DIR} src/ipbus-firmware
- /${CI_PROJECT_DIR}/work_area/src/ipbus-firmware/tests/ci/test-run-sim-slave-counters.sh
|
<reponame>Elon-J/OpenFASOC
name: Test documentation
on:
pull_request:
push:
schedule:
- cron: "0 2 * * *" # run at 2 AM UTC
jobs:
build-linux:
runs-on: ubuntu-latest
steps:
- name: Cancel Workflow Action
uses: styfle/[email protected]
- uses: actions/checkout@v2
- name: Set up Python 3.9
uses: actions/setup-python@v2
with:
python-version: 3.9
- name: Install dependencies
run: |
pip install -r requirements_dev.txt
pip install -e .
sudo apt install pandoc
- name: Test documentation
run: |
cd docs
make html
|
<reponame>hito0512/Vitis-AI
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: ofa-resnet50 for Image Classification.
input size: 160*160
float ops: 1.8G
task: classification
framework: pytorch
prune: 'no'
version: 2.0
files:
- name: pt_OFA-resnet50_imagenet_160_160_900M_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_OFA-resnet50_imagenet_160_160_900M_2.0.zip
checksum: 93e562adda01ae2d4cdba50a3369eb2e
- name: ofa_resnet50_0_9B_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=ofa_resnet50_0_9B_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: 763d04086c20cb3e6a300b4568c02264
- name: ofa_resnet50_0_9B_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=ofa_resnet50_0_9B_pt-vck190-r2.0.0.tar.gz
checksum: 1e75b630138c482a2da4cfd69e3aa35d
- name: ofa_resnet50_0_9B_pt
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=ofa_resnet50_0_9B_pt-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: f40411d872c0625de285fc255cc46d47
- name: ofa_resnet50_0_9B_pt
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=ofa_resnet50_0_9B_pt-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: 1b226962ff4b95499be46a88a3e4f931
- name: ofa_resnet50_0_9B_pt
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=ofa_resnet50_0_9B_pt-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: baec2c97e55032d190e31c7e08013f8b
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
site_name: PyWire
pages:
- Index: index.md
- Basics: basics.md
- Timing: timing.md
- BRAM: bram.md
theme: readthedocs
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: pruned psmnet for disparity estimation on scene flow.
input size: 576*960
float ops: 696G
task: disparity estimation
framework: pytorch
prune: '0.68'
version: 2.0
files:
- name: pt_psmnet_sceneflow_576_960_0.68_696G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_psmnet_sceneflow_576_960_0.68_696G_2.0.zip
checksum: fa065dd17026709c2f5b4a597223c613
- name: PSMNet_pruned_0_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=PSMNet_pruned_0_pt-vck190-r2.0.0.tar.gz
checksum: d4e40e0530eb3a7301bbd952ff0d4016
- name: PSMNet_pruned_1_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=PSMNet_pruned_1_pt-vck190-r2.0.0.tar.gz
checksum: 675d070ea4d334df926060915a178833
- name: PSMNet_pruned_2_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=PSMNet_pruned_2_pt-vck190-r2.0.0.tar.gz
checksum: 745668835dea4c97eecc9cd1134490fe
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
<reponame>chao0502/aquila<gh_stars>0
language: cpp
# run on new infrastructure
sudo: false
cache:
apt: true
directories:
$RISCV
$HOME/.sbt
timeout: 1000
# required packages to install
dist: bionic
jdk:
- openjdk8
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- gcc-7
- g++-7
- gperf
- autoconf
- automake
- autotools-dev
- curl
- python3
- git
- ca-certificates
- libmpc-dev
- libmpfr-dev
- libgmp-dev
- gawk
- build-essential
- bison
- flex
- texinfo
- libusb-1.0-0-dev
- gperf
- libtool
- patchutils
- bc
- libexpat-dev
- zlib1g-dev
- valgrind
- verilator
env:
global:
- RISCV="/home/travis/riscv_install"
branches:
only:
- master
before_install:
- export CXX=g++-7 CC=gcc-7
# setup dependent paths
- export PATH=$RISCV/bin:$VERILATOR_ROOT/bin:$PATH
- export LIBRARY_PATH=$RISCV/lib
- export LD_LIBRARY_PATH=$RISCV/lib
- export C_INCLUDE_PATH=$RISCV/include
- export CPLUS_INCLUDE_PATH=$RISCV/include
# number of parallel jobs to use for make commands and simulation
- export NUM_JOBS=1
- git submodule update --init --recursive
stages:
- checkout
- compile1
- build_model
- test
jobs:
include:
- stage: checkout
name: checkout gcc
script:
- verilate/ci/riscv_gcc_check.sh 0
- stage: compile1
name: build gcc
script:
- verilate/ci/riscv_gcc_check.sh 1
- rm -rf $RISCV/riscv-gnu-toolchain
- stage: build_model
name: build verilator model
script:
- cd verilate
- make clean
- make core_verilate
- stage: test
name: run riscv ui-p
script:
- cd verilate/ci
- ./run_rv_test.sh riscv32ui-p.list 0
- stage: test
name: run riscv um-p
script:
- cd verilate/ci
- ./run_rv_test.sh riscv32um-p.list 0
- stage: test
name: run riscv torture
script:
- cd verilate/ci
- ./run_rv_torture.sh 0
install: travis_wait
|
# This YAML file describes your package. Stack will automatically generate a
# Cabal file when you run `stack build`. See the hpack website for help with
# this file: <https://github.com/sol/hpack>.
name: NDP
version: '0.0.0'
github: "fortlogic/NDP"
license: MIT
author: "<NAME>"
maintainer: "<NAME>"
synopsis: "Nameless Data Processor: an FPGA sized computer"
description: The NDP is an experiment in computer design by an idiot (read amateur)
# category: Other
extra-source-files:
- CHANGELOG.md
- LICENSE.md
- package.yaml
- README.org
- stack.yaml
ghc-options: -Wall
library:
dependencies:
- base
- bytestring
- clash-prelude
- constraints
- data-default
- filepath
- ghc-typelits-extra
- ghc-typelits-knownnat
- ghc-typelits-natnormalise
- singletons
source-dirs: hardware
# generated-exposed-modules: Paths_NDP
generated-other-modules: Paths_NDP
executables:
ndp:
source-dirs: executable
main: Make/Main.hs
dependencies:
- SHA
- base
- bytestring
- clash-ghc
- conf
- directory
- netpbm
- posix-escape
- safe-globals
- shake
- unordered-containers
- vector
ghc-options:
- -rtsopts
- -threaded
- -with-rtsopts=-N
benchmarks:
NDP-benchmarks:
source-dirs: benchmark
main: Main.hs
dependencies:
- NDP
- base
- criterion
ghc-options:
- -rtsopts
- -threaded
- -with-rtsopts=-N
tests:
NDP-test-suite:
source-dirs: test-suite
main: Main.hs
dependencies:
- NDP
- QuickCheck
- base
- clash-prelude
- formatting
- ghc-typelits-extra
- ghc-typelits-knownnat
- ghc-typelits-natnormalise
- hspec
- tasty
- tasty-ant-xml
- tasty-expected-failure
- tasty-hspec
- tasty-hunit
- tasty-quickcheck
- template-haskell
ghc-options:
- -rtsopts
- -threaded
- -with-rtsopts=-N
|
<reponame>hito0512/Vitis-AI
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: SA-Gate for RGBD Segmentation on NYUv2.
input size: 360*360
float ops: 59.71G
task: RGBD segmentation
framework: pytorch
prune: 'no'
version: 2.0
files:
- name: pt_sa-gate_NYUv2_360_360_59.71G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_sa-gate_NYUv2_360_360_59.71G_2.0.zip
checksum: 67ce3c481b59688628062051683d139e
- name: SA_gate_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=SA_gate_pt-vck190-r2.0.0.tar.gz
checksum: 1c2f5152e067df5b9f149a01d8c79477
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
<filename>ansible/integration_test_start_up.yml
# Prepare integration test environment and start system under test
- hosts: all
tasks:
- name: get integration test start time
shell: "date '+%Y-%m-%d %H:%M:%S'"
register: integration_test_start_time
- hosts: epics_test_server
tasks:
- name: IOC is running
docker_container:
name: ioc
image: simpleioc
working_dir: /opt/epics/iocs/SimpleIoc/iocBoot/iocSimpleIoc
entrypoint: /opt/epics/iocs/SimpleIoc/bin/linux-x86_64/SimpleIoc st.cmd
tty: yes
network_mode: host
state: started
- hosts: test_orchestrator
tasks:
- name: create Kafka topics
command: "{{ test_orchestrator_virtualenv_path }}/bin/python {{ test_orchestrator_script_dir }}/create-kafka-topics.py {{ integration_test_kafka_bootstrap_servers }}"
- hosts: forwarder
tasks:
- name: forwarder is running
become: yes
service:
name: ecdc-forwarder
state: restarted
- hosts: kafka_to_nexus
tasks:
- name: file writer is running
become: yes
service:
name: ecdc-kafka-to-nexus
state: restarted
- name: output NeXus file is absent
become: yes
file:
path: "{{ kafka_to_nexus_data_dir }}/{{ integration_test_nexus_file_name }}"
state: absent
- hosts: all
tasks:
- name: populate service facts
service_facts:
- hosts: efu
tasks:
- name: fail test if efu service is not running
fail:
msg: "ecdc-efu-{{ item.instrument }}-{{ item.region }} service is not running"
loop: "{{ event_formation_unit_config }}"
when: ansible_facts.services['ecdc-efu-' + item.instrument|string + '-' + item.region|string + '.service'].state != 'running'
any_errors_fatal: true
- hosts: forwarder
tasks:
- name: fail test if forwarder service is not running
fail:
msg: "ecdc-forwarder service is not running"
when: ansible_facts.services['ecdc-forwarder.service'].state != 'running'
any_errors_fatal: true
- hosts: kafka_to_nexus
tasks:
- name: restart file writer
become: yes
systemd:
name: ecdc-kafka-to-nexus
state: restarted
any_errors_fatal: true
- hosts: test_orchestrator
tasks:
- name: set initial value of EPICS PV
shell: "\
{{ epics_base_dir }}/epics/bin/pvput SIMPLE:VALUE1 0; \
sleep 1"
|
---
- hosts: data-generators
gather_facts: False
tasks:
- name: start multigrid data
command: "{{daemonize_cmd}} {{script_path}}/datagen_multigrid.bash 1"
tags:
- multigrid
|
name: wb_dma_top
clock_port: clk_i
verilog:
- wb_dma_ch_arb.v
- wb_dma_ch_pri_enc.v
- wb_dma_ch_rf.v
- wb_dma_ch_sel.v
- wb_dma_de.v
- wb_dma_defines.v
- wb_dma_inc30r.v
- wb_dma_pri_enc_sub.v
- wb_dma_rf.v
- wb_dma_top.v
- wb_dma_wb_if.v
- wb_dma_wb_mast.v
- wb_dma_wb_slv.v
|
name: C/C++ CI Build and Test
on:
push:
branches: [master]
paths-ignore:
- 'python/**'
- 'tools/extra/fpgabist/**'
- 'tools/extra/packager/*.py'
- 'tools/extra/packager/metadata/**'
- 'tools/extra/packager/test/*.py'
- 'tools/extra/pac_hssi_config/*.py'
- 'tools/extra/fpgadiag/**'
- 'tools/utilities/**'
- 'scripts/*.py'
- 'platforms/scripts/platmgr/**'
- '.github/workflows/python-static-analysis.yml'
pull_request:
branches: [master]
paths-ignore:
- 'python/**'
- 'tools/extra/fpgabist/**'
- 'tools/extra/packager/*.py'
- 'tools/extra/packager/metadata/**'
- 'tools/extra/packager/test/*.py'
- 'tools/extra/pac_hssi_config/*.py'
- 'tools/extra/fpgadiag/**'
- 'tools/utilities/**'
- 'scripts/*.py'
- 'platforms/scripts/platmgr/**'
- '.github/workflows/python-static-analysis.yml'
jobs:
coding-style:
runs-on: ubuntu-20.04
strategy:
matrix:
lang-type: [c, cpp]
steps:
- uses: actions/checkout@v2
- name: update
run: sudo apt-get update -y
- name: Get Packages
uses: mstksg/get-package@v1
with:
apt-get: clang-format
- name: test ${{ matrix.lang-type }}
run: ${{ github.workspace }}/scripts/test-codingstyle-all.sh ${{ matrix.lang-type }}
build:
runs-on: ubuntu-20.04
strategy:
matrix:
build-type: [Debug, Release, RelWithDebInfo]
steps:
- uses: actions/checkout@v2
- name: update
run: sudo apt-get update -y
- name: Get Packages
uses: mstksg/get-package@v1
with:
apt-get: uuid-dev libjson-c-dev libhwloc-dev lcov libtbb-dev linux-headers-generic libedit-dev libudev-dev libcap-dev
- name: Get Python packages
run: python3 -m pip install setuptools --user
- name: configure ${{ matrix.build-type }}
run: mkdir ${{ github.workspace }}/.build && cd ${{ github.workspace }}/.build && cmake .. -DCMAKE_BUILD_TYPE=${{ matrix.build-type }}
- name: make ${{ matrix.build-type }}
run: cd ${{ github.workspace }}/.build && make -j
test:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- name: update
run: sudo apt-get update -y
- name: Get Packages
uses: mstksg/get-package@v1
with:
apt-get: uuid-dev libjson-c-dev libhwloc-dev lcov libtbb-dev libedit-dev libudev-dev libcap-dev
- name: set hugepages
run: sudo sysctl -w vm.nr_hugepages=8
- name: run unit tests
run: ${{ github.workspace }}/scripts/cover.sh
- name: Coveralls
uses: coverallsapp/github-action@master
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
path-to-lcov: unittests/coverage.info.cleaned
build-doc:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- name: update
run: sudo apt-get update -y
- name: Build Docker image
run: docker build . -f docker/docs/Dockerfile -t opae/docs-builder
- name: Build Documentation
run: docker run --rm -v ${{ github.workspace }}:/root opae/docs-builder ./scripts/build-documentation.sh
- name: Upload latest to github.io
if: ${{ github.ref == 'refs/heads/master' }}
working-directory: ${{ github.workspace }}/mybuild_docs
run: ${{ github.workspace }}/scripts/push-documentation.sh latest
- name: Upload tag to github.io
if: startsWith(github.ref, 'refs/tags/')
working-directory: ${{ github.workspace }}/mybuild_docs
run: ${{ github.workspace }}/scripts/push-documentation.sh "${GITHUB_TAG##*/}"
- name: Archive html docs
uses: actions/upload-artifact@v1
with:
name: docs
path: mybuild_docs/sphinx/html
- name: Link Checker
uses: peter-evans/link-checker@v1
with:
args: -v -r mybuild_docs/sphinx/html
- name: Archive link-checker result
uses: actions/upload-artifact@v1
with:
name: link-checker
path: link-checker
|
<gh_stars>1000+
name: ctf_story
description: A crypto challenge in Google CTF 2021.
environment:
sdk: '>=2.8.1 <3.0.0'
dependencies:
crclib:
git:
url: https://github.com/google/crclib.dart
ref: main
|
<gh_stars>1-10
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: yolov3 detection on cityscapes dataset.
input size: 256*512
float ops: 5.46G
task: detection
framework: darknet
prune: '0.9'
version: 2.0
files:
- name: dk_yolov3_cityscapes_256_512_0.9_5.46G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=dk_yolov3_cityscapes_256_512_0.9_5.46G_2.0.zip
checksum: e35d55ac4d425cfdf561e86303616b3d
- name: yolov3_adas_pruned_0_9
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_adas_pruned_0_9-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: bda113a57f7746cb6e087b8dfe29a5c9
- name: yolov3_adas_pruned_0_9
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_adas_pruned_0_9-vck190-r2.0.0.tar.gz
checksum: 50b3b6f3f6c360c4ca68f5d52e214858
- name: yolov3_adas_pruned_0_9
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_adas_pruned_0_9-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: 0910d142e854be31bf904056e05fbb86
- name: yolov3_adas_pruned_0_9
type: xmodel
board: vck50008pe-DPUCVDX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_adas_pruned_0_9-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz
checksum: ca485a7f3e5656562b08cb138c03a4d7
- name: yolov3_adas_pruned_0_9
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_adas_pruned_0_9-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: 605018c884ee598c1fdbab7e6a0cbf2f
- name: yolov3_adas_pruned_0_9
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DW
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_adas_pruned_0_9-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: b7ecdf3f60da1a0c3ae222cf7b17c343
- name: yolov3_adas_pruned_0_9
type: xmodel
board: u200-DPUCADF8H & u250-DPUCADF8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov3_adas_pruned_0_9-u200-u250-r2.0.0.tar.gz
checksum: d49caa0180d0a260e2746cd07393626e
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
name: snitch
authors: [ "<NAME> <<EMAIL>>" ]
dependencies:
common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.19.0 }
sources:
- defines:
SNITCH_ENABLE_PERF: 1
files:
# packages
- src/riscv_instr.sv
- src/snitch_pkg.sv
- src/snitch_axi_pkg.sv
- src/snitch_icache/snitch_icache_pkg.sv
# rest of RTL
- src/snitch.sv
- src/snitch_regfile_ff.sv
# - src/snitch_regfile_latch.sv
- src/snitch_lsu.sv
- src/snitch_ipu.sv
- src/snitch_shared_muldiv.sv
- src/snitch_demux.sv
- src/snitch_axi_adapter.sv
- src/snitch_icache/snitch_icache.sv
- src/snitch_icache/snitch_icache_l0.sv
- src/snitch_icache/snitch_icache_handler.sv
- src/snitch_icache/snitch_icache_lfsr.sv
- src/snitch_icache/snitch_icache_lookup_parallel.sv
- src/snitch_icache/snitch_icache_lookup_serial.sv
- src/snitch_icache/snitch_icache_refill.sv
- src/snitch_read_only_cache/snitch_axi_to_cache.sv
- src/snitch_read_only_cache/snitch_read_only_cache.sv
|
<reponame>usmnzain/serv<gh_stars>1-10
name: CI
on: [push, pull_request]
jobs:
compliance:
name: RISC-V Compliance Test
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
with:
path: serv
- name: install fusesoc, verilator and gcc
run: |
sudo apt-get install -y python3-setuptools verilator gcc-riscv64-unknown-elf
pip3 install fusesoc
- name: set SERV directory
run: echo "SERV=$GITHUB_WORKSPACE/serv" >> $GITHUB_ENV
- name: setup workspace
run: fusesoc library add serv $SERV
- name: build servant
run: fusesoc run --target=verilator_tb --build --build-root=servant_x servant
- name: download risc-v compliance
run: git clone https://github.com/riscv/riscv-compliance --branch 1.0
- name: run RV32i compliance tests
run: |
cd $GITHUB_WORKSPACE/riscv-compliance
make TARGETDIR=$SERV/riscv-target RISCV_TARGET=serv RISCV_DEVICE=rv32i RISCV_ISA=rv32i TARGET_SIM=$GITHUB_WORKSPACE/servant_x/verilator_tb-verilator/Vservant_sim
- name: run RV32Zicsr compliance tests
run: |
cd $GITHUB_WORKSPACE/riscv-compliance
make TARGETDIR=$SERV/riscv-target RISCV_TARGET=serv RISCV_DEVICE=rv32i RISCV_ISA=rv32Zicsr TARGET_SIM=$GITHUB_WORKSPACE/servant_x/verilator_tb-verilator/Vservant_sim
- name: run RV32Zifencei compliance tests
run: |
cd $GITHUB_WORKSPACE/riscv-compliance
make TARGETDIR=$SERV/riscv-target RISCV_TARGET=serv RISCV_DEVICE=rv32i RISCV_ISA=rv32Zifencei TARGET_SIM=$GITHUB_WORKSPACE/servant_x/verilator_tb-verilator/Vservant_sim
|
apiVersion: "apps/v1"
kind: "Deployment"
metadata:
name: "chal"
spec:
replicas: 1
template:
metadata:
annotations:
container.apparmor.security.beta.kubernetes.io/challenge: localhost/ctf-profile
spec:
containers:
- name: "challenge"
image: "challenge"
ports:
- containerPort: 1337
securityContext:
capabilities:
add: ["SYS_ADMIN"]
readOnlyRootFilesystem: true
command:
resources:
limits:
cpu: "0.9"
requests:
cpu: "0.45"
volumeMounts:
- name: "secrets"
mountPath: "/secrets"
readOnly: true
- name: "config"
mountPath: "/config"
readOnly: true
volumes:
- name: "secrets"
secret:
secretName: "secrets"
defaultMode: 0444
- name: "config"
configMap:
name: "config"
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: detection & segmentation on bdd dataset.
input size: 288*512
float ops: 14.8G
task: detection&segmentation
framework: caffe
prune: 'no'
version: 1.4
files:
- name: cf_multitask_bdd_288_512_14.8G_1.4
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=cf_multitask_bdd_288_512_14.8G_1.4.zip
checksum: ec63158ab603d1f1297909ab064670f8
- name: multi_task
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=multi_task-zcu102_zcu104_kv260-r1.4.0.tar.gz
checksum: fbdcf1fec7a0bd6f46fa82610dede0ff
- name: multi_task
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=multi_task-vck190-r1.4.0.tar.gz
checksum: 2c6bc5b2990c9732bad8ef28736b90d7
- name: multi_task
type: xmodel
board: vck5000
download link: https://www.xilinx.com/bin/public/openDownload?filename=multi_task-vck5000-DPUCVDX8H-r1.4.0.tar.gz
checksum: 0603ab9f7addd58488a79711172c1a3b
- name: multi_task
type: xmodel
board: u50-DPUCAHX8H & u50lv-DPUCAHX8H & u280-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=multi_task-u50-u50lv-u280-DPUCAHX8H-r1.4.0.tar.gz
checksum: f77622254276d6c4694fdc4350595e29
- name: multi_task
type: xmodel
board: u50-DPUCAHX8L & u50lv-DPUCAHX8L & u280-DPUCAHX8L
download link: https://www.xilinx.com/bin/public/openDownload?filename=multi_task-u50-u50lv-u280-DPUCAHX8L-r1.4.0.tar.gz
checksum: 7ac5eacfe6413657ad7debe58d7efceb
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
<reponame>jvanstraten/vhdmmio
metadata:
name: stream_monitor
brief: monitors a number of streams.
features:
bus-width: 32
optimize: yes
entity:
clock-name: axil_aclk
reset-name: axil_aresetn
reset-active: low
bus-prefix: axil_
bus-flatten: yes
interface:
flatten: yes
fields:
- repeat: 4 # <-- number of streams!
stride: 5
field-repeat: 1
subfields:
- address: 0
name: ecnt
doc: |
Accumulates the number of elements transferred on the stream. Writing to
the register subtracts the written value.
behavior: custom
interfaces:
- input: valid
- input: ready
- input: count:8 # <-- width of count field!
- input: dvalid
- input: last
- drive: ivalid
- drive: iready
- drive: itransfer
- drive: ipacket
- state: accum:32
pre-access: |
$s.ivalid$ := $s.valid$;
$s.iready$ := $s.ready$;
$s.itransfer$ := $s.valid$ and $s.ready$;
$s.ipacket$ := $s.valid$ and $s.ready$ and $s.last$;
if $s.valid$ = '1' and $s.ready$ = '1' and $s.dvalid$ = '1' then
if unsigned($s.count$) = 0 then
$s.accum$ := std_logic_vector(unsigned($s.accum$) + 2**$s.count$'length);
else
$s.accum$ := std_logic_vector(unsigned($s.accum$) + unsigned($s.count$));
end if;
end if;
read: |
$data$ := $s.accum$;
$ack$ := true;
write: |
$s.accum$ := std_logic_vector(unsigned($s.accum$) - unsigned($data$));
$ack$ := true;
post-access: |
if reset = '1' then
$s.accum$ := (others => '0');
end if;
- address: 4
name: vcnt
doc: |
Increments each cycle that the stream is valid. Writing to the register
subtracts the written value.
behavior: internal-counter
internal: ivalid
- address: 8
name: rcnt
doc: |
Increments each cycle that the stream is ready. Writing to the register
subtracts the written value.
behavior: internal-counter
internal: iready
- address: 12
name: tcnt
doc: |
Increments for each transfer on the stream, i.e. when it is handshaked.
Writing to the register subtracts the written value.
behavior: internal-counter
internal: itransfer
- address: 16
name: pcnt
doc: |
Increments each time the last signal is set during a handshake. Writing
to the register subtracts the written value.
behavior: internal-counter
internal: ipacket
|
language: go
go:
- 1.14.x
- 1.x
env:
- GO111MODULE=on
notifications:
email:
on_success: never
on_failure: change
before_install:
# Install linters and misspell
- curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin ${GOLANGCI_LINT_VERSION}
- golangci-lint --version
install:
- go mod tidy
- git diff --exit-code go.mod
- git diff --exit-code go.sum
- go mod download
|
name: sv-test-env
channels:
- symbiflow
- pkgw-forge
- conda-forge
dependencies:
# - symbiflow::iverilog
# - symbiflow::moore
# - symbiflow::odin_ii
# - symbiflow::slang
# - symbiflow::surelog
# - symbiflow::sv-parser
# - symbiflow::tree-sitter-verilog
# - symbiflow::uhdm-integration-verilator
# - symbiflow::uhdm-integration-yosys
# - symbiflow::verible
# - symbiflow::verilator
# - symbiflow::yosys
# - symbiflow::antmicro-yosys
# - symbiflow::zachjs-sv2v
- ccache
- python=3.8
- pip
- symbiflow-yosys-plugins
- pip: # Packages installed from PyPI
- -r file:requirements.txt
|
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
# Azure template for downloading pipeline step outputs and unpacking them.
#
# This template will download all artifacts from the upstream jobs listed in
# `downloadPartialBuildBinFrom` (which are expected to use
# upload-artifacts-template.yml) and unpack them.
#
# This template expects that a variable $BUILD_ROOT is set to a writeable
# directory; the results will be available in $BIN_DIR. See
# util/build_consts.sh for more information.
parameters:
# Names of jobs to download a partial $BIN_DIR from.
# List all "upstream" jobs here which produce files in $BIN_DIR which are
# needed in the current job. In other words, list all jobs which need to be
# executed and produce some build outputs before this job can start. The
# current job will find all outputs from those upstream jobs in $BIN_DIR and
# can use them.
- name: downloadPartialBuildBinFrom
type: object
default: []
steps:
- ${{ each job in parameters.downloadPartialBuildBinFrom }}:
- task: DownloadPipelineArtifact@2
inputs:
buildType: current
path: '$(BUILD_ROOT)/downloads/${{ job }}'
artifact: "partial-build-bin-${{ job }}"
displayName: Downloading partial build-bin directory from job ${{ job }}
- bash: |
set -e
test -n "$BUILD_ROOT"
. util/build_consts.sh
test -f "$BUILD_ROOT/upstream_bin_dir_contents.txt" && {
echo The download-artifacts-template.yml template can be called only once per job.
exit 1
}
mkdir -p "$BIN_DIR"
echo 'Extracting partial BIN_DIRs:'
find "$BUILD_ROOT/downloads" \
-name 'build-bin.tar' \
-exec \
tar -C "$BIN_DIR" \
--strip-components=1 \
-xvf {} \;
# Remember all files which were present in the upstream $BIN_DIRs.
find "$BIN_DIR" -type f -fprintf "$BUILD_ROOT/upstream_bin_dir_contents.txt" '%P\n'
echo
echo Upstream BIN_DIR contents:
echo vvvvvvvvvvvvvvvvvv
cat "$BUILD_ROOT/upstream_bin_dir_contents.txt"
echo ^^^^^^^^^^^^^^^^^^
displayName: Unpack upstream outputs
|
<reponame>gokhankici/iodine<gh_stars>1-10
resolver: lts-10.5
packages:
- .
- ./liquid-fixpoint
extra-deps:
- dotgen-0.4.2
- fgl-visualize-0.1.0.1
- located-base-0.1.1.1
- megaparsec-7.0.4
- parser-combinators-1.0.1
- hspec-2.7.0
- QuickCheck-2.12.6.1
- hspec-core-2.7.0
- hspec-discover-2.7.0
- yaml-0.11.0.0
- libyaml-0.1.1.0
|
<gh_stars>10-100
name: Test
on: push
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- uses: jodersky/setup-mill@master
with:
mill-version: 0.9.5
- name: Compile
run: mill __.compile
- name: Test
run: mill __.test
|
<reponame>AlSaqr-platform/cva6
package:
name: clint
authors:
dependencies:
cva6: { git: "<EMAIL>:minho-pulp/cva6.git" , rev: "7de8e9429f0cf38b8668e7c993d4acad63bbc4b1" }
sources:
- ./axi_lite_interface.sv
- ./clint.sv
|
<reponame>Datum-Technology-Corporation/core-v-mcu
apb_adv_timer:
incdirs: [
./rtl,
]
files: [
./rtl/adv_timer_apb_if.sv,
./rtl/comparator.sv,
./rtl/lut_4x4.sv,
./rtl/out_filter.sv,
./rtl/up_down_counter.sv,
./rtl/input_stage.sv,
./rtl/prescaler.sv,
./rtl/apb_adv_timer.sv,
./rtl/timer_cntrl.sv,
./rtl/timer_module.sv,
]
jg_slint_top_name: [
apb_adv_timer
]
jg_slint_elab_opt: [
]
jg_slint_postelab_cmds: [
]
jg_slint_clocks: [
HCLK,
]
jg_slint_resets: [
~HRESETn,
]
|
<reponame>captainko/vim-matchup
name: Neovim with Tree-sitter
on:
push:
branches:
- '*'
pull_request:
branches:
- master
jobs:
build:
strategy:
matrix:
neovim_version:
- 'head'
- 'v0.5.0'
runs-on: ubuntu-latest
env:
TESTS_ENABLE_TREESITTER: 1
steps:
- uses: 'actions/checkout@v2'
- name: Install vader.vim
run: git clone --depth=1 https://github.com/junegunn/vader.vim.git test/vader/vader.vim
- name: 'setup Neovim'
uses: 'thinca/action-setup-vim@v1'
with:
vim_version: '${{ matrix.neovim_version }}'
vim_type: 'Neovim'
- name: Install nvim-treesitter
run: git clone --depth=1 https://github.com/nvim-treesitter/nvim-treesitter.git test/vader/plugged/nvim-treesitter
- name: Install python treesitter module
run: nvim --headless -Nu test/vader/minvimrc -c 'TSInstallSync python' -c 'q'
- name: 'Show version'
run: nvim --version
- name: 'Run test'
run: |
bash -c 'VIMCMD=nvim test/vader/run'
- name: Install ruby treesitter module
run: nvim --headless -Nu test/vader/minvimrc -c 'TSInstallSync ruby' -c 'q'
- name: 'Run new tests'
run: |
cd ./test/new && make -j1 && make -j1 coverage
|
name: VHDL Testbenches
on: [push, pull_request]
jobs:
vhdl_testbenches:
    # NOTE(review): ubuntu-16.04 GitHub-hosted runners are retired; migrate to a supported image (e.g. ubuntu-20.04 or later)
    runs-on: ubuntu-16.04
steps:
- uses: actions/checkout@v1
- name: Set up Python 3.7
uses: actions/setup-python@v1
with:
python-version: 3.7
- uses: actions/cache@v1
# Use Cache, see: https://github.com/actions/cache
if: startsWith(runner.os, 'Linux')
id: cache
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install GHDL
run: |
sudo add-apt-repository universe
sudo add-apt-repository ppa:mati75/ghdl # or ppa:gekkio/ghdl
sudo apt-get update
sudo apt-get install ghdl*
- name: GHDL Check
run: ghdl --version
- name: Install Python Dependencies
run: |
pwd
python -m pip install --upgrade pip
pip install -r vivado/requirements.txt
- name: Test
run: |
pwd
cd vivado/NN_IP/EggNet_1.0/sim/MemCtrl/
python tb_MemCtrl_run_sim.py
|
<reponame>ess-dmsc/dmg-build-scripts
# Stop system under test
- hosts: pipeline_data_generator
tasks:
- name: check if efu data generator finished
async_status:
jid: "{{ efu_generator.ansible_job_id }}"
register: efu_generator_result
until: efu_generator_result.finished
retries: "{{ integration_test_num_retries }}"
ignore_errors: yes
- name: print efu data generator stdout
debug:
msg: "{{ efu_generator_result.stdout_lines }}"
- name: print efu data generator stderr
debug:
msg: "{{ efu_generator_result.stderr_lines }}"
- hosts: test_orchestrator
tasks:
- name: check if file writer command finished
async_status:
jid: "{{ file_writer_command.ansible_job_id }}"
register: file_writer_command_result
until: file_writer_command_result.finished
retries: "{{ integration_test_num_retries }}"
ignore_errors: yes
- name: print file writer command stdout
debug:
msg: "{{ file_writer_command_result.stdout_lines }}"
- name: print file writer command stderr
debug:
msg: "{{ file_writer_command_result.stderr_lines }}"
- hosts: efu
tasks:
- name: wait for pipeline to finish processing data
pause:
seconds: 5
- name: check efu pipeline for correct counter values
shell: "{{ event_formation_unit_base_dir }}/event-formation-unit/util/efushell/verifymetrics.py {{ integration_test_efu_metrics_verification_string }}"
register: efu_pipeline_counters_result
ignore_errors: yes
- name: print efu pipeline counter check stdout
debug:
msg: "{{ efu_pipeline_counters_result.stdout_lines }}"
- name: print efu pipeline counter check stderr
debug:
msg: "{{ efu_pipeline_counters_result.stderr_lines }}"
|
<gh_stars>1-10
name: Notice
on:
push:
paths:
- stack.yaml
- stack.yaml.lock
- sv2v.cabal
- notice.sh
- NOTICE
jobs:
notice:
runs-on: macOS-latest
steps:
- uses: actions/checkout@v1
- name: Install Haskell Stack
run: brew install haskell-stack
- name: Regenerate NOTICE
run: ./notice.sh > NOTICE
- name: Validate NOTICE
run: |
if [ -n "`git status --porcelain`" ]; then
echo "Notice differs!"
PAGER= git diff
exit 1
fi
|
<filename>plugins/simulators/vcs/buildInOptions/vcs_options.yaml<gh_stars>1-10
options:
quite_comp:
usage:
      quiet compile with -q, and disable lint with +lint=none
on_action:
compile_option:
- -q
- +lint=none
wave:
usage:
      dump waveform, value is format[FSDB, VPD], use macro[DUMP_FSDB, DUMP_VPD] in your testbench, default is VPD
on_action:
compile_option:
- -lca -debug_access+pp
- +define+DUMP_VPD
with_value_action:
compile_option:
- -lca -debug_access+pp
- +define+DUMP$wave
|
---
name: trenz_teb080x_te803_dma_example
board: boards/teb803
version: 0.1.1
cores:
- fpga/cores/axi_ctl_register_v1_0
- fpga/cores/axi_sts_register_v1_0
memory:
- name: control
offset: '0xA4001000'
range: 4K
- name: status
offset: '0xA4002000'
range: 4K
- name: xadc
offset: '0xA4003000'
range: 64K
- name: ram_mm2s
offset: '0x60000000'
range: 256M
- name: ram_s2mm
offset: '0x70000000'
range: 256M
- name: dma
offset: '0xA4010000'
range: 64K
- name: ocm_mm2s
offset: '0xFFFF0000'
range: 32K
- name: ocm_s2mm
offset: '0xFFFF8000'
range: 32K
- name: sclr
offset: '0xF8000000'
range: 64K
control_registers:
- led
status_registers:
- forty_two
parameters:
fclk0: 100000000 # FPGA clock speed in Hz
xdc:
- ./../../../boards/teb803/config/ports.xdc
- ./constraints.xdc
drivers:
- server/drivers/common.hpp
- ./dma_controller.hpp
- ./led_blinker.hpp
web:
- ./web/index.html
- web/koheron.ts
- web/led-blinker.ts
- ./web/app.ts
- web/main.css
|
<gh_stars>1-10
# Password is the SHA-256 hash of "<PASSWORD>"
User(joel):
email: <EMAIL>
password: <PASSWORD>
role: admin
|
<filename>ariane/src/axi/src_files.yml
axi:
files:
- src/axi_pkg.sv
- src/axi_test.sv
- src/axi_intf.sv
- src/axi_to_axi_lite.sv
- src/axi_lite_to_axi.sv
- src/axi_lite_xbar.sv
- src/axi_arbiter.sv
- src/axi_address_resolver.sv
- src/axi_join.sv
- src/axi_lite_join.sv
|
<filename>ips/adv_dbg_if/src_files.yml
adv_dbg_if:
incdirs: [
rtl,
]
files: [
rtl/adbg_axi_biu.sv,
rtl/adbg_axi_module.sv,
rtl/adbg_lint_biu.sv,
rtl/adbg_lint_module.sv,
rtl/adbg_crc32.v,
rtl/adbg_or1k_biu.sv,
rtl/adbg_or1k_module.sv,
rtl/adbg_or1k_status_reg.sv,
rtl/adbg_top.sv,
rtl/bytefifo.v,
rtl/syncflop.v,
rtl/syncreg.v,
rtl/adbg_tap_top.v,
rtl/adv_dbg_if.sv,
rtl/adbg_axionly_top.sv,
rtl/adbg_lintonly_top.sv,
]
jg_slint_top_name: [
adbg_top
]
jg_slint_elab_opt: [
]
jg_slint_postelab_cmds: [
]
jg_slint_clocks: [
tck_i,
axi_aclk,
]
jg_slint_resets: [
~trstn_i,
~axi_aresetn,
]
|
<gh_stars>10-100
udma_qspi:
incdirs: [
rtl,
]
files: [
rtl/udma_spim_reg_if.sv,
rtl/udma_spim_ctrl.sv,
rtl/udma_spim_txrx.sv,
rtl/udma_spim_top.sv,
]
|
# Copyright 2021 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
name: snitch_read_only_cache
authors:
- <NAME> <<EMAIL>>
- <NAME> <<EMAIL>>
dependencies:
axi: {path: ../../vendor/pulp_platform_axi}
common_cells: {path: ../../vendor/pulp_platform_common_cells}
tech_cells_generic: {path: ../../vendor/pulp_platform_tech_cells_generic}
snitch_icache: {path: ../snitch_icache}
sources:
# Level 0:
- src/snitch_axi_to_cache.sv
- src/snitch_read_only_cache.sv
- target: test
files:
- test/snitch_read_only_cache_tb.sv
|
TMVAxml: bdtWeights/TMVAClassification_BDT.weights.xml
OutputDir: test-l1PFTau
ProjectName: l1PFTau
XilinxPart: xc7vx690tffg1927-2
ClockPeriod: 5
IOType: io_parallel # options: io_serial/io_parallel
ReuseFactor: 1
DefaultPrecision: ap_fixed<18,8>
|
<reponame>tblink-rpc/tblink-rpc-examples
package:
name: tblink-rpc-examples
version: 0.0.1
dev-deps:
- name: cython
src: pypi
- name: vlsim
src: pypi
- name: tblink-rpc-hdl
url: https://github.com/tblink-rpc/tblink-rpc-hdl.git
- name: tblink-rpc-utils
url: https://github.com/tblink-rpc/tblink-rpc-utils.git
- name: mkdv
url: https://github.com/fvutils/mkdv.git
- name: pytblink-rpc
url: https://github.com/tblink-rpc/pytblink-rpc.git
- name: tblink-bfms-rv
url: https://github.com/tblink-bfms/tblink-bfms-rv.git
- name: cocotb
src: pypi
|
language: python
python:
  - "3.6"
services:
- docker
before_install:
- docker pull ghdl/ext:vunit-master
- docker run -dt --name=vunit ghdl/ext:vunit-master /bin/sh
- docker exec vunit /bin/sh -c 'apt-get install -y git'
- docker exec vunit /bin/sh -c 'git --version'
- docker exec vunit /bin/sh -c 'git clone --depth 1 https://github.com/MatthieuMichon/vhld-unit-test.git'
script:
- docker exec vunit /bin/sh -c 'cd ./vhld-unit-test/; ./tests/run_vunit.py'
- docker stop vunit
- docker rm vunit
|
rv_plic:
files:
- ../ariane/register_interface/src/reg_intf_pkg.sv
- rtl/top_pkg.sv
- rtl/tlul_pkg.sv
- rtl/rv_plic_reg_pkg.sv
- rtl/prim_subreg_ext.sv
- rtl/plic_regmap.sv
- rtl/prim_subreg.sv
- rtl/rv_plic_reg_top.sv
- rtl/rv_plic_gateway.sv
- rtl/rv_plic_target.sv
- rtl/rv_plic.sv
- rtl/plic_top.sv
|
<filename>.github/workflows/docker.yml<gh_stars>10-100
name: Docker
on:
push:
branches:
- master
- docker
jobs:
build:
runs-on: ubuntu-latest
if: "!contains(github.event.head_commit.message, 'skip ci')"
steps:
- uses: actions/checkout@v2
- name: Checkout submodules
shell: bash
run: |
auth_header="$(git config --local --get http.https://github.com/.extraheader)"
git submodule sync --recursive
git -c "http.extraheader=$auth_header" -c protocol.version=2 submodule update --init --force --recursive --depth=1
- name: Build wheel
shell: bash
env:
BUILD_WHEEL: true
OS: linux
run: |
source ./scripts/ci.sh
- name: Build docker
shell: bash
run: |
docker build -f docker/Dockerfile . -t keyiz/kratos
docker login -u $DOCKER_USERNAME -p $DOCKER_PASSWORD
docker push keyiz/kratos:latest
docker tag keyiz/kratos docker.pkg.github.com/kuree/kratos/py38:latest
docker login docker.pkg.github.com -u $GITHUB_USERNAME -p $GITHUB_TOKEN
docker push docker.pkg.github.com/kuree/kratos/py38:latest
# build the full one
docker build -f docker/Dockerfile-Cad . -t keyiz/kratos-full
docker push keyiz/kratos-full:latest
env:
GITHUB_USERNAME: ${{ secrets.GITHUB_USERNAME }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
- uses: actions/upload-artifact@v1
with:
name: py3.7 wheel
path: wheelhouse/
|
name: trigger-lint
on:
pull_request:
jobs:
upload_event_file:
runs-on: ubuntu-latest
steps:
- run: cp "$GITHUB_EVENT_PATH" ./event.json
- name: Upload event file as artifact
uses: actions/upload-artifact@v2
with:
name: event.json
path: event.json
|
<filename>tests/fpga_system_tests/emu/simctrl.pre.yaml
digital_ctrl_inputs:
rstb:
abspath: 'tb_i.rstb'
width: 1
init_value: 0
dump_start:
abspath: 'tb_i.dump_start'
width: 1
init_value: 0
tdi:
abspath: 'tb_i.tdi'
width: 1
init_value: 0
tck:
abspath: 'tb_i.tck'
width: 1
init_value: 0
tms:
abspath: 'tb_i.tms'
width: 1
init_value: 1
trst_n:
abspath: 'tb_i.trst_n'
width: 1
init_value: 0
jitter_rms_int:
abspath: 'tb_i.jitter_rms_int'
width: 7
init_value: 0
noise_rms_int:
abspath: 'tb_i.noise_rms_int'
width: 11
init_value: 0
prbs_eqn:
abspath: 'tb_i.prbs_eqn'
width: 32
init_value: 1048578 # hex value: 0x100002
chan_wdata_0:
abspath: 'tb_i.chan_wdata_0'
width: 18
init_value: 0
chan_wdata_1:
abspath: 'tb_i.chan_wdata_1'
width: 18
init_value: 0
chan_waddr:
abspath: 'tb_i.chan_waddr'
width: 9
init_value: 0
chan_we:
abspath: 'tb_i.chan_we'
width: 1
init_value: 0
digital_ctrl_outputs:
tdo:
abspath: 'tb_i.tdo'
width: 1
digital_probes:
ctl_pi_0:
abspath: 'tb_i.top_i.iacore.ctl_pi[0]'
width: 9
adcout_unfolded_0:
abspath: 'tb_i.top_i.idcore.adcout_unfolded[0]'
width: 8
signed: 1
estimated_bits_0:
abspath: 'tb_i.top_i.idcore.estimated_bits[0]'
width: 10
signed: 1
# clock inputs and outputs for the four PIs
# clk_i_pi_0:
# abspath: 'tb_i.top_i.iacore.iPI[0].iPI.clk_in'
# width: 1
# clk_o_pi_0:
# abspath: 'tb_i.top_i.iacore.iPI[0].iPI.clk_out_slice'
# width: 1
# clk_i_pi_1:
# abspath: 'tb_i.top_i.iacore.iPI[1].iPI.clk_in'
# width: 1
# clk_o_pi_1:
# abspath: 'tb_i.top_i.iacore.iPI[1].iPI.clk_out_slice'
# width: 1
# clk_i_pi_2:
# abspath: 'tb_i.top_i.iacore.iPI[2].iPI.clk_in'
# width: 1
# clk_o_pi_2:
# abspath: 'tb_i.top_i.iacore.iPI[2].iPI.clk_out_slice'
# width: 1
# clk_i_pi_3:
# abspath: 'tb_i.top_i.iacore.iPI[3].iPI.clk_in'
# width: 1
# clk_o_pi_3:
# abspath: 'tb_i.top_i.iacore.iPI[3].iPI.clk_out_slice'
# width: 1
|
<reponame>thirono/basil<gh_stars>10-100
# Device description for a simulated device for SCPI HL + VISA TL test.
identifier : "LSG Serial #1234"
get_name : "?IDN" # overwrite default SCPI
set_on : "!OUT"
get_on : "?OUT"
get_frequency : "?FREQ"
|
# This file defines single architecture set for tlut systolic array performance projection
- proj_16_16_bank4_block32
- proj_32_32_bank4_block32
- proj_64_64_bank4_block32
- proj_128_128_bank4_block32
|
<reponame>stillwater-sc/PoC<filename>.appveyor.yml<gh_stars>100-1000
version: 1.1.0-b{build}
clone_folder: c:\projects\poc
init:
# Checking that DEP is enabled
- ps: Write-Host "Initializing virtual machine ..."
- ps: git --version
- ps: $env:Path = $env:Path.Replace("Python27", "Python35-x64")
install:
- ps: Write-Host "Installing requirements..."
- ps: python --version
- ps: python -m pip install pip --upgrade
- ps: python -m pip install -r .\requirements.txt
- ps: python -m pip list --format=columns
- ps: Write-Host "Configuring PoC..."
- ps: cp .\tools\AppVeyor\config.private.ini .\py\
- ps: cp .\tools\AppVeyor\my_project.vhdl .\tb\common\
- ps: mkdir .\temp\precompiled\vsim\
- ps: cp .\tools\AppVeyor\modelsim.ini .\temp\precompiled\vsim\
- ps: .\poc.ps1 info
build: off
build_script:
- ps: Write-Host "Testing query interface..."
- ps: .\poc.ps1 query INSTALL.PoC:InstallationDirectory
- ps: .\Tools\AppVeyor\PoC.list.ps1
- ps: .\Tools\AppVeyor\PoC.dryrun.ps1
|
<filename>.gitlab-ci.yml
stages:
- initial_report
- quick_checks
- midway_report
- builds
- publish
- final_report
variables:
GITHUB_REPO_API_URL: "https://api.github.com/repos/ipbus/ipbus-firmware"
IPBUS_DOCKER_REGISTRY: "gitlab-registry.cern.ch/ipbus/ipbus-docker"
include:
- local: '/ci/github-notifications.yml'
- local: '/ci/templates.yml'
- local: '/ci/sim.yml'
- local: '/ci/publish.yml'
- local: '/ci/vivado-checks.yml'
- local: '/ci/vivado-builds.yml'
|
# Run in container-based environment
sudo: required
dist: trusty
group: edge # Add this
# Using the precompiled toolchain from docker
services:
- docker
# define test
env:
global:
- DOCKER_TOP=/opt/lowrisc
- DOCKER_TAG=minion-v0.4
matrix:
- CONFIG=DefaultConfig TEST_CASE=run-asm-tests
- CONFIG=TagConfig TEST_CASE=run-asm-tests
- CONFIG=TagConfig TEST_CASE=run-tag-tests
# actual test build
before_install:
- docker pull lowrisc/lowrisc-docker:$DOCKER_TAG
before_script:
- docker create -v $PWD:/opt/lowrisc -e TOP=/opt/lowrisc -t --name=test lowrisc/lowrisc-docker:$DOCKER_TAG
- docker start test
script:
- docker exec test make -C $DOCKER_TOP/vsim CONFIG=$CONFIG $TEST_CASE
after_script:
- docker stop test
- docker rm test
|
<filename>models/AI-Model-Zoo/model-list/cf_densebox_wider_360_640_1.11G_1.4/model.yaml
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: face detection model.
input size: 360*640
float ops: 1.11G
task: face detection
framework: caffe
prune: 'no'
version: 1.4
files:
- name: cf_densebox_wider_360_640_1.11G_1.4
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=cf_densebox_wider_360_640_1.11G_1.4.zip
checksum: e7a2fb60638909db368ab6bb6fa8283e
- name: densebox_640_360
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=densebox_640_360-zcu102_zcu104_kv260-r1.4.0.tar.gz
checksum: 101bce699b9dada0e97fdf0c95aa809f
- name: densebox_640_360
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=densebox_640_360-vck190-r1.4.0.tar.gz
checksum: 101c3c36dec1ffd9291126fcd365fbc0
- name: densebox_640_360
type: xmodel
board: vck5000
download link: https://www.xilinx.com/bin/public/openDownload?filename=densebox_640_360-vck5000-DPUCVDX8H-r1.4.0.tar.gz
checksum: 88df124f009f6f5c7fa3f9ed2c380a99
- name: densebox_640_360
type: xmodel
board: u50-DPUCAHX8H & u50lv-DPUCAHX8H & u280-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=densebox_640_360-u50-u50lv-u280-DPUCAHX8H-r1.4.0.tar.gz
checksum: 4e7a026d70c87051f1e086d5011b1320
- name: densebox_640_360
type: xmodel
board: u200-DPUCADF8H & u250-DPUCADF8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=densebox_640_360-u200-u250-r1.4.0.tar.gz
checksum: 9ffe55e2515cb7aacf29b3ef7234fc2e
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
image: docker:latest
services:
- docker:dind
stages:
- check
- test
- examples
.env: &env
GHDL_IMAGE: ghdl/ghdl
GHDL_TAG: ubuntu18-llvm-5.0
variables:
<<: *env
.ghdl-check-job: &ghdl-check-job
stage: check
image: $GHDL_IMAGE:$GHDL_TAG
script:
- find hardware -name "*.vhd" ! -path "*/test/*" |
xargs ghdl -i -v --std=${STD:-08} |
grep entity |
sed -e 's/entity //' |
sed -e 's/ \*\*//' |
xargs -L 1 ghdl -m --std=${STD:-08} -frelaxed-rules --ieee=synopsys
check-vhdl-93c:
<<: *ghdl-check-job
variables:
<<: *env
STD: 93c
check-vhdl-08:
<<: *ghdl-check-job
test-vhdl-08:
<<: *ghdl-check-job
stage: test
allow_failure: true
script:
- find hardware -name "*.vhd" |
xargs ghdl -i -v --std=${STD:-08} |
grep entity |
grep _tb |
sed -e 's/entity //' |
sed -e 's/ \*\*//' |
xargs -i -t bash -c '
ghdl -m --std=${STD:-08} -frelaxed-rules --ieee=synopsys {};
ghdl -r --std=${STD:-08} --ieee=synopsys {} --stop-time=100ns'
|
name: 'test'
on:
push:
pull_request:
schedule:
- cron: '0 15 * * *'
env:
CI: true
jobs:
lin-docker:
runs-on: ubuntu-latest
env:
DOCKER_BUILDKIT: 1
steps:
- uses: actions/checkout@v2
- run: docker run --rm -v $(pwd):/src -w /src ghdl/vunit:llvm-master ./test.sh
- uses: actions/upload-artifact@v2
with:
name: waves
path: ./*.ghw
win-setup:
runs-on: windows-latest
defaults:
run:
shell: msys2 {0}
steps:
- uses: msys2/setup-msys2@v2
with:
msystem: MINGW64
update: true
- uses: actions/checkout@v2
- uses: ghdl/setup-ghdl-ci@master
with:
backend: llvm
- run: ./test.sh
|
<filename>ips_list.yml
#
# List of IPs and relative branch/commit-hash/tag.
# Uses the YAML syntax.
#
axi/axi2mem:
commit: 6973e0434d26ba578cdb4aa69c26c1facd1a3f15
domain: [soc, cluster]
group: pulp-platform
axi/axi2per:
commit: tags/v1.0.1
domain: [cluster]
group: pulp-platform
axi/per2axi:
commit: v1.0.4
domain: [soc, cluster]
group: pulp-platform
axi/axi_size_conv:
commit: 5239f87fe772111ec368fb08dbb971516edce097
domain: [cluster]
group: pulp-platform
cluster_interconnect:
commit: tags/v1.1.0
domain: [cluster]
group: pulp-platform
event_unit_flex:
commit: 1.4.1
domain: [cluster]
group: pulp-platform
mchan:
commit: tags/v1.2.2
domain: [cluster]
group: pulp-platform
hier-icache:
commit: v1.2.0
domain: [cluster]
group: pulp-platform
icache-intc:
commit: tags/v1.0.1
domain: [cluster]
group: pulp-platform
icache_mp_128_pf:
commit: 6f2e54102001230db9c82432bf9e011842419a48
domain: [cluster]
group: pulp-platform
icache_private:
commit: 1d4cdbcbec3ab454c09d378fc55631b60450fccd
domain: [cluster]
group: pulp-platform
hwpe-ctrl:
commit: v1.5
group: pulp-platform
domain: [soc, cluster, pulp]
hwpe-stream:
commit: v1.6.1
group: pulp-platform
domain: [soc, cluster, pulp]
hci:
commit: v1.0.6
group: pulp-platform
domain: [cluster, pulp]
cluster_peripherals:
commit: v2.1.0
domain: [cluster]
group: pulp-platform
fpu_interco:
commit: <PASSWORD>
domain: [soc, cluster]
group: pulp-platform
hwpe-datamover-example:
commit: v1.0
group: pulp-platform
domain: [cluster, pulp]
|
<reponame>Calculasians/HDC-Sensor-Fusion-Research
sim.inputs.top_module: "hdc_sensor_fusion"
sim.inputs.tb_dut: "dut"
sim.inputs.tb_name: "hdc_sensor_fusion_tb"
sim.inputs.input_files_meta: "append"
sim.inputs.input_files:
- "src/HDC_Sensor_Fusion_Rule90/hdc_sensor_fusion.sv"
- "src/HDC_Sensor_Fusion_Rule90/hdc_sensor_fusion_tb.sv"
- "src/HDC_Sensor_Fusion_Rule90/associative_memory.sv"
- "src/HDC_Sensor_Fusion_Rule90/hv2000_binary_adder.sv"
- "src/HDC_Sensor_Fusion_Rule90/fuser.v"
- "src/HDC_Sensor_Fusion_Rule90/spatial_encoder.v"
- "src/HDC_Sensor_Fusion_Rule90/temporal_encoder.v"
- "src/HDC_Sensor_Fusion_Rule90/hv_generator.sv"
sim.inputs:
timescale: "1ns/1ps"
options:
- "-notice"
- "-line"
- "-debug_pp"
- "-debug_all"
- "+v2k"
- "+lint=all,noVCDE"
- "+incdir+../../src/HDC_Sensor_Fusion_Rule90"
- "+define+CLOCK_PERIOD=3.3"
- "-sverilog"
execute_sim: true
execution_flags: ["+verbose=1"]
|
<reponame>IanBoyanZhang/kratos<filename>.github/workflows/macos.yml
name: MacOS Wheel Test
on: [push]
jobs:
build:
runs-on: macos-latest
if: "!contains(github.event.head_commit.message, 'skip ci')"
steps:
- uses: actions/checkout@v2
- name: Checkout submodules
shell: bash
run: |
auth_header="$(git config --local --get http.https://github.com/.extraheader)"
git submodule sync --recursive
git -c "http.extraheader=$auth_header" -c protocol.version=2 submodule update --init --force --recursive --depth=1
- name: Setup Env
uses: conda-incubator/setup-miniconda@v2
with:
auto-update-conda: true
python-version: 3.7
- name: Install verilator
shell: bash
run: |
brew install verilator
verilator --version
- name: Install Python packages
shell: bash -l {0}
run: |
pip install wheel pytest
- name: Build and run wheel
shell: bash -l {0}
env:
OS: osx
PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
# miniconda uses 10.9
MACOSX_DEPLOYMENT_TARGET: 10.9
KRATOS_DEBUG: 1
run: |
source ./scripts/ci.sh
- uses: actions/upload-artifact@v1
with:
name: py3.7 wheel
path: dist/
|
<reponame>mundaym/ibex
# This is a basic workflow to help you get started with Actions
name: metrics-regress
# Controls when the action will run. Triggers the workflow on push or pull request
# events but only for the master branch
on:
push:
branches: [ master ]
# pull_request_target:
# branches: [ master ]
# If you fork this repository, you must create a new Metrics project for your fork
# and set the environment variable $METRICS_PROJECT_ID accordingly
jobs:
metrics-regression:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- run: ./scripts/metrics-regress.py $METRICS_REGRESSION_NAME $METRICS_PROJECT_ID
env:
METRICS_CI_TOKEN: ${{ secrets.METRICS_CI_TOKEN }}
METRICS_REGRESSION_NAME: riscv-dv_regression
METRICS_PROJECT_ID: ${{ secrets.METRICS_PROJECT_ID }}
PR_NUMBER: ${{ github.event.pull_request.number }}
shell: bash
|
<reponame>slaclab/lcls2-pcie-aps<filename>firmware/releases.yaml
GitBase: ..
TopRoguePackage: lcls2_timetool
RoguePackages:
- common/python
RogueScripts:
- ../software/scripts/timetoolGui
CondaDependencies:
- surf=v2.5.1
- axi_pcie_core=v3.2.1
- lcls_timing_core=v3.2.1
- lcls2_pgp_fw_lib=v3.3.0
- l2si_core=v3.1.0
- clink_gateway_fw_lib=v2.1.1
- matplotlib
RogueConfig:
Targets:
TimeToolKcu1500:
ImageDir: targets/TimeToolKcu1500/images
Extensions:
- mcs
Releases:
TimeToolKcu1500:
Primary: True
Targets:
- TimeToolKcu1500
Types:
- Rogue
|
version: 1
disable_existing_loggers: true
formatters:
default:
format: "%(asctime)s.%(msecs)03d %(levelname)s:\t%(message)s"
datefmt: "%Y-%m-%d %H:%M:%S"
handlers:
  console:
    class: logging.StreamHandler
    formatter: default
    level: INFO
    filters: [allow_foo]
    stream: ext://sys.stdout
  file:
    class: logging.handlers.RotatingFileHandler
    formatter: default
    filename: /payload/logs/payload_dispatcher.log
    maxBytes: 1000000
    backupCount: 3
loggers:
root:
level: INFO
handlers: [console, file]
gmqtt:
level: INFO
handlers: [console, file]
    propagate: true
|
.template_github_commit_status:
image: cern/cc7-base:20170113
tags:
- docker
only:
- branches
- web
dependencies: []
github_commit_status:start:
extends: .template_github_commit_status
stage: initial_report
when: always
script:
- "curl -H \"Authorization: token ${GITHUB_API_TOKEN}\" --data '{\"state\" : \"pending\", \"target_url\" : \"'\"${CI_PROJECT_URL}\"'/pipelines/'\"${CI_PIPELINE_ID}\"'\", \"description\" : \"CI pipeline has started\", \"context\" : \"gitlab-ci-quick\"}' ${GITHUB_REPO_API_URL}/statuses/${CI_COMMIT_SHA}"
- "if [[ $CI_COMMIT_REF_NAME =~ ^pull-requests/[0-9]+$ ]]; then curl -H \"Authorization: token ${GITHUB_API_TOKEN}\" --data '{\"state\" : \"pending\", \"target_url\" : \"'\"${CI_PROJECT_URL}\"'/pipelines/'\"${CI_PIPELINE_ID}\"'\", \"description\" : \"CI pipeline has started\", \"context\" : \"gitlab-ci\"}' ${GITHUB_REPO_API_URL}/statuses/${CI_COMMIT_SHA} ; fi"
github_commit_status:midway:success:
extends: .template_github_commit_status
stage: midway_report
when: on_success
script:
- "curl -H \"Authorization: token ${GITHUB_API_TOKEN}\" --data '{\"state\" : \"success\", \"target_url\" : \"'\"${CI_PROJECT_URL}\"'/pipelines/'\"${CI_PIPELINE_ID}\"'\", \"description\" : \"CI quick jobs completed successfully!\", \"context\" : \"gitlab-ci-quick\"}' ${GITHUB_REPO_API_URL}/statuses/${CI_COMMIT_SHA}"
github_commit_status:end:failure:
extends: .template_github_commit_status
stage: final_report
when: on_failure
script:
- "curl -H \"Authorization: token ${GITHUB_API_TOKEN}\" --data '{\"state\" : \"failure\", \"target_url\" : \"'\"${CI_PROJECT_URL}\"'/pipelines/'\"${CI_PIPELINE_ID}\"'\", \"description\" : \"Jobs have failed in CI pipeline\", \"context\" : \"gitlab-ci\"}' ${GITHUB_REPO_API_URL}/statuses/${CI_COMMIT_SHA}"
github_commit_status:end:success:
extends: .template_github_commit_status
stage: final_report
only:
- /^pull-requests.*$/
when: on_success
script:
- "curl -H \"Authorization: token ${GITHUB_API_TOKEN}\" --data '{\"state\" : \"success\", \"target_url\" : \"'\"${CI_PROJECT_URL}\"'/pipelines/'\"${CI_PIPELINE_ID}\"'\", \"description\" : \"CI pipeline completed successfully!\", \"context\" : \"gitlab-ci\"}' ${GITHUB_REPO_API_URL}/statuses/${CI_COMMIT_SHA}"
|
<reponame>Calculasians/HDC-Sensor-Fusion-Research<gh_stars>0
sim.inputs.tb_name: "hdc_sensor_fusion_tb"
sim.inputs.input_files_meta: "append"
sim.inputs.input_files:
- "src/../build/par-rundir/hdc_sensor_fusion.sim.v"
- "src/HDC_Sensor_Fusion_SEFUAMFoldedRule90/hdc_sensor_fusion_tb.sv"
sim.inputs.level: "gl"
sim.inputs:
timing_annotated: true
timescale: "1ns/1ps"
options:
- "-notice"
- "-line"
- "+v2k"
- "+lint=all,noVCDE"
- "+incdir+../../src/HDC_Sensor_Fusion_SEFUAMFoldedRule90"
- "+define+CLOCK_PERIOD=1100"
- "-sverilog"
- "-debug_acc+pp"
- "-debug_acc+all"
- "-debug_region+cell+encrypt"
execute_sim: true
execution_flags: ["+verbose=1"]
|
<reponame>recogni/recogni-ariane
package:
name: axi_riscv_atomics
authors: ["<NAME> <<EMAIL>>", "<NAME> <<EMAIL>>"]
dependencies:
# TODO: Once the special features we need from our dependencies are released in a tagged version,
# switch to that version for SemVer compatibility.
axi: { git: "https://github.com/pulp-platform/axi.git", version: 0.6.0 }
common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.10.0 }
sources:
# Source files grouped in levels. Files in level 0 have no dependencies on files in this package.
# Files in level 1 only depend on files in level 0, files in level 2 on files in levels 1 and 0,
# etc. Files within a level are ordered alphabetically.
# Level 0
- src/axi_res_tbl.sv
- src/axi_riscv_amos_alu.sv
# Level 1
- src/axi_riscv_amos.sv
- src/axi_riscv_lrsc.sv
# Level 2
- src/axi_riscv_atomics.sv
- src/axi_riscv_lrsc_wrap.sv
# Level 3
- src/axi_riscv_atomics_wrap.sv
|
hosts:
h1 : {}
h2 : {}
switches:
s1:
cfg: ../../build/bmv2/FirstHalfWithoutCompression.json
interfaces:
- link: h1
- link: s2
- link: s1compress
cmds:
- table_add forward set_egress 0 => 1
- table_add forward set_egress 2 => 1
- table_add forward set_egress 1 => 0
- table_add compression_offload.offloaded_port compression_offload.is_offload_port 2 => 1
- table_add egress_compression.port_compression egress_compression.set_port_compression 1 => 1
- ../controller_files/fec_encoder_commands.txt
s1compress:
cfg: ../../build/bmv2/Compress.json
interfaces:
- link: s1
cmds:
- table_add forward set_egress 0 => 0
s2:
cfg: ../../build/bmv2/Dropper.json
interfaces:
- link: s1
- link: s3
replay:
s1: ../pcaps/lldp_enable_fec.pcap
s3: ../pcaps/lldp_enable_fec.pcap
cmds:
- ../controller_files/forwarding_commands.txt
- table_add dropper set_drop_rate 1 => 3 5
- table_add dropper set_drop_rate 0 => 3 5
s3:
cfg: ../../build/bmv2/SecondHalf.json
interfaces:
- link: s2
- link: h2
cmds:
- ../controller_files/decompressor_commands.txt
- ../controller_files/forwarding_commands.txt
- ../controller_files/fec_decoder_commands.txt
|
<gh_stars>1-10
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
name: clint
description: "RISC-V Core-local Interrupt Controller"
authors: ["<NAME> <<EMAIL>>"]
sources:
# Level 0:
- src/axi_lite_interface.sv
# Level 1:
- src/clint.sv
|
# Check NEORV32 software framework and test processor using .ci/ scripts
name: Processor
on:
push:
branches:
- master
paths:
- 'rtl/**'
- 'sw/**'
- 'sim/**'
pull_request:
branches:
- master
paths:
- 'rtl/**'
- 'sw/**'
- 'sim/**'
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
    name: Processor
steps:
- name: '🧰 Repository Checkout'
uses: actions/checkout@v2
- name: '🔧 Setup Environment Variables'
run: |
echo "$GITHUB_WORKSPACE/riscv/bin" >> $GITHUB_PATH
echo $GITHUB_WORKSPACE
- name: '🔧 Setup RISC-V GCC'
run: |
/bin/bash -c "chmod u+x ./.ci/install.sh && ./.ci/install.sh"
echo $GITHUB_WORKSPACE
- name: '🔧 Setup GHDL Simulator'
uses: ghdl/setup-ghdl-ci@nightly
with:
backend: gcc
- name: '⚙️ Run Software Framework Tests'
run: /bin/bash -c "chmod u+x ./.ci/sw_check.sh && ./.ci/sw_check.sh"
- name: '⚙️ Run Processor Hardware Tests'
run: /bin/bash -c "chmod u+x ./.ci/hw_check.sh && ./.ci/hw_check.sh"
|
<filename>software/TimeTool/config/sim.yml
TimeToolDev:
Application:
AppLane[:]:
EventBuilder:
Bypass: 0x1 # Bypass channel 0 (timing channel for simulation only)
|
<reponame>thirono/basil
# Device description for the Keithley 2000 Multimeter.
# Do not expect the Keithley 2000 to work over serial!
# Only GPIB shows working communication with this device!
identifier: KEITHLEY INSTRUMENTS INC.,MODEL 2000
get_current: MEAS:CURR?
get_voltage: MEAS:VOLT?
|
repos:
- repo: https://github.com/psf/black
rev: stable
hooks:
- id: black
language_version: python3.7
args: [--line-length=79]
- repo: https://gitlab.com/daverona/pre-commit/cpp
rev: master
hooks:
- id: clang-format
entry: clang-format
args: []
- id: cpplint
args: ["--filter=-build/include_subdir,-readability/braces", "--exclude=sonar/core/templates/template_tb.cpp"]
- id: cppcheck
entry: cppcheck
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.2.3
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: fix-encoding-pragma
args: [--remove]
- id: check-added-large-files
- id: check-json
- id: check-merge-conflict
- id: pretty-format-json
- repo: https://github.com/PyCQA/pylint
rev: pylint-2.6.0
hooks:
- id: pylint
- repo: https://github.com/PyCQA/bandit
rev: 1.6.2
hooks:
- id: bandit
args: ["--exclude=tests/"]
- repo: https://gitlab.com/pycqa/flake8
rev: 3.7.9
hooks:
- id: flake8
language_version: python3
- repo: https://github.com/jumanjihouse/pre-commit-hooks
rev: 1.11.2
hooks:
- id: shellcheck
- repo: https://github.com/pycqa/isort
rev: 5.5.2
hooks:
- id: isort
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.790
hooks:
- id: mypy
# - repo: https://github.com/jorisroovers/gitlint
# rev: v0.13.1
# hooks:
# - id: gitlint
# additional_dependencies: ["nltk"]
|
version: 2
build:
os: "ubuntu-20.04"
tools:
python: "3.8"
python:
# Install our python package before building the docs
install:
- requirements: docs/requirements.txt
system_packages: true
#conda:
# environment: docs/environment.yml
# Build documentation in the docs/ directory with Sphinx
sphinx:
configuration: docs/source/conf.py
fail_on_warning: true
formats:
- pdf
- epub
|
<filename>designs/dragonphy_top/rtl/configure.yml
name: rtl
commands:
- |
mkdir -p outputs
python gen_tcl.py
outputs:
- read_design.tcl
parameters:
adk_name: "tsmc16"
design_name: None
|
<filename>.github/workflows/run_ghdl.yml
name: CI/Run
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: ghdl/setup-ghdl-ci@nightly
with:
backend: mcode
- name: Setup Python
uses: actions/setup-python@v2
- name: Install Requirements
run: pip install -r requirements.txt
- name: VHDLTest Utility
run: python -m VHDLTest --config test.yaml --log test.log --junit test.xml --exit-0
working-directory: example
env:
PYTHONPATH: ..
|
init:
- set PATH=C:\Python36-x64;C:\Python36-x64\Scripts;%PATH%
build: off
install:
- pip install tox wheel
- python -c "import sys,platform; print(sys.platform); print(platform.machine())"
test_script:
- tox -e cover
|
version: 1.1.0-b{build}
clone_folder: c:\projects\poc
init:
# Checking that DEP is enabled
- ps: Write-Host "Initializing virtual machine ..."
- ps: git --version
- ps: $env:Path = $env:Path.Replace("Python27", "Python35-x64")
install:
- ps: Write-Host "Installing requirements..."
- ps: python --version
- ps: python -m pip install pip --upgrade
- ps: python -m pip install py-flags
- ps: python -m pip install -r .\requirements.txt
- ps: python -m pip list --format=columns
- ps: Write-Host "Configuring PoC..."
- ps: cp .\tools\AppVeyor\config.private.ini .\py\
- ps: cp .\tools\AppVeyor\my_project.vhdl .\tb\common\
- ps: .\poc.ps1
build: off
build_script:
- ps: Write-Host "Testing query interface..."
- ps: .\poc.ps1 query INSTALL.PoC:InstallationDirectory
- ps: Write-Host "Testing information interface..."
- ps: .\poc.ps1 list-testbench PoC.*
- ps: .\poc.ps1 list-netlist PoC.*
- ps: Write-Host "Testing simulators..."
- ps: .\poc.ps1 --dryrun ghdl PoC.arith.prng --analyze --elaborate
- ps: Write-Host "Testing synthesizers..."
# - ps: .\poc.ps1 --dryrun quartus PoC.arith.prng --board=DE4
# - ps: .\poc.ps1 --dryrun lse PoC.arith.prng --board=ECP5Versa
# - ps: .\poc.ps1 --dryrun xst PoC.arith.prng --board=KC705
# - ps: .\poc.ps1 --dryrun vivado PoC.arith.prng --board=KC705
# - ps: Write-Host "Testing core generators..."
# - ps: .\poc.ps1 --dryrun coregen PoC.xil.mig.Atlys_1x128 --board=Atlys
|
<reponame>davideschiavone/core-v-mcu<gh_stars>10-100
logint_dc_fifo:
files: [
log_int_dc_slice.sv,
log_int_dc_slice_wrap.sv,
dc_data_buffer.sv,
dc_full_detector.v,
dc_synchronizer.v,
dc_token_ring_fifo_din.v,
dc_token_ring_fifo_dout.v,
dc_token_ring.v
]
|
<filename>conf/fusesoc-configs/ibex.yml
name: ibex
description: Full ibex core test
top_module: ibex_core
tags: ibex
path: third_party/cores/ibex
command: fusesoc --cores-root third_party/cores/ibex run --target=synth --setup lowrisc:ibex:top_artya7
conf_file: build/lowrisc_ibex_top_artya7_0.1/synth-vivado/core-deps.mk
test_file: ibex.sv
timeout: 100
|
<filename>conf/fusesoc-configs/swerv.yml
name: swerv
description: Full swerv core test
top_module: swerv_wrapper
tags: swerv
path: third_party/cores/swerv
command: fusesoc --cores-root third_party/cores/swerv run --target=lint --setup chipsalliance.org:cores:SweRV_EH1:1.8
conf_file: build/chipsalliance.org_cores_SweRV_EH1_1.8/lint-verilator/chipsalliance.org_cores_SweRV_EH1_1.8.vc
test_file: swerv.sv
timeout: 180
|
metadata:
name: mmio
entity:
bus-flatten: yes
bus-prefix: mmio_
clock-name: kcd_clk
reset-name: kcd_reset
features:
bus-width: 64
optimize: yes
interface:
flatten: yes
fields:
- address: 0b0---
name: AFU_DHF
behavior: constant
value: 17293826967149215744 # [63:60]: 1 && [40]: 1
- address: 0b1---
name: AFU_ID_L
behavior: constant
value: 9298143585121133213 # check primmap.json
- address: 0b10---
name: AFU_ID_H
behavior: constant
value: 4422222669130253009 # check primmap.json
- address: 0b11---
name: DFH_RSVD0
behavior: constant
value: 0
- address: 0b100---
name: DFH_RSVD1
behavior: constant
value: 0
- address: 64
name: start
doc: Start the kernel.
bitrange: 0
behavior: strobe
- address: 64
name: stop
doc: Stop the kernel.
bitrange: 1
behavior: strobe
- address: 64
name: reset
doc: Reset the kernel.
bitrange: 2
behavior: strobe
- address: 68
name: idle
doc: Kernel idle status.
bitrange: 32
behavior: status
- address: 68
name: busy
doc: Kernel busy status.
bitrange: 33
behavior: status
- address: 68
name: done
doc: Kernel done status.
bitrange: 34
behavior: status
- address: 72
name: result
doc: Result.
bitrange: 63..0
behavior: status
- address: 80
name: in_firstidx
doc: in first index.
bitrange: 31..0
behavior: control
- address: 84
name: in_lastidx
doc: in last index (exclusive).
bitrange: 63..32
behavior: control
- address: 88
name: out_firstidx
doc: out first index.
bitrange: 31..0
behavior: control
- address: 92
name: out_lastidx
doc: out last index (exclusive).
bitrange: 63..32
behavior: control
- address: 96
name: in_number_values
doc: Buffer address for in number_values
bitrange: 63..0
behavior: control
- address: 104
name: out_number_values
doc: Buffer address for out number_values
bitrange: 63..0
behavior: control
- address: 112
name: Profile_enable
doc: Activates profiler counting when this bit is high.
bitrange: 0
behavior: control
- address: 116
name: Profile_clear
doc: Resets profiler counters when this bit is asserted.
bitrange: 32
behavior: strobe
|
<filename>circle.yml
dependencies:
pre:
- sudo apt-get install software-properties-common
- sudo add-apt-repository -y ppa:george-edison55/cmake-3.x
- sudo apt-get update
- sudo apt-get install cmake cmake-data build-essential libc6 flex bison
test:
override:
- mkdir -p assembler/build/
- cd assembler/build; cmake -DTARGET_GROUP=production .. && make all
- cd assembler/build; cmake -DTARGET_GROUP=test .. && make all && ./d16-tests/testing_main | tee testout.txt
- sudo cp assembler/build/d16-main/d16 /usr/local/bin
post:
- assembler/d16-tests/Unity/auto/parseOutput.rb -xml assembler/build/testout.txt
- cp report.xml $CIRCLE_TEST_REPORTS/
- cp assembler/build/d16-main/d16 $CIRCLE_ARTIFACTS/
|
<gh_stars>100-1000
S25FS256_model:
defines: [
SPEEDSIM,
]
files: [
spi_flash/S25fs256s/model/s25fs256s.v,
]
flags: [
skip_synthesis,
]
24FC1025_model:
defines: [
SPEEDSIM,
]
files: [
i2c_eeprom/24FC1025.v,
]
flags: [
skip_synthesis,
]
hyperflash_model:
defines: [
SPEEDSIM,
]
files: [
hyperflash_model/s26ks512s.v,
]
flags: [
skip_synthesis,
]
hyperram_model:
defines: [
SPEEDSIM,
]
files: [
hyperram_model/s27ks0641.v,
]
flags: [
skip_synthesis,
]
psram_model:
defines: [
SPEEDSIM,
]
files: [
psram_model/*.vp_modelsim,
]
flags: [
skip_synthesis,
]
i2s_model:
defines: [
SPEEDSIM,
]
files: [
i2s/i2c_if.v,
i2s/i2s_vip_channel.sv,
i2s/i2s_vip.sv,
]
flags: [
skip_synthesis,
]
open_models:
files: [
spi_master_padframe.sv,
uart_tb_rx.sv,
camera/cam_vip.sv
]
flags: [
skip_synthesis,
]
|
<gh_stars>1-10
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
parameters:
rvc_test_suites: []
jobs:
- job: riscv_compliance_tests_${{ join('_', parameters.rvc_test_suites) }}
# Disable RISC-V Compliance tests due to excessive runtimes in simulation
# (#4128).
condition: false
displayName: Execute RISC-V compliance tests for ${{ join(' ', parameters.rvc_test_suites) }}
pool:
vmImage: ubuntu-18.04
dependsOn:
- top_earlgrey_verilator
- sw_build
steps:
- template: install-package-dependencies.yml
- template: download-artifacts-template.yml
- bash: |
set -x
sudo util/get-toolchain.py \
--install-dir="${TOOLCHAIN_PATH}" \
--release-version="${TOOLCHAIN_VERSION}" \
--update
displayName: Install toolchain
- ${{ each test_suite in parameters.rvc_test_suites }}:
- bash: |
set -e
ci/run_riscv_compliance.sh ${{ test_suite }}
displayName: Execute ${{ test_suite }}
|
<reponame>kmettias/ara<gh_stars>0
# Copyright 2021 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
#
# Author: <NAME> <<EMAIL>>
# Lint the design
name: lint
on: [push, pull_request]
jobs:
check-license:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.x
- name: Install Python requirements
run: pip install -r python-requirements.txt
- name: Check license
run: python scripts/licence-checker.py --config scripts/licence-checker.hjson hardware
check-clang-format:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.x
- name: Install Python requirements
run: pip install -r python-requirements.txt
- name: Install clang-format
run: sudo apt-get install clang-format
- name: Run clang-format
run: |
for file in `find apps -type f -name "*.[c|h|cpp|hpp]" | grep -vP "apps/riscv-tests"`; do
./scripts/run-clang-format.py --clang-format-executable clang-format $file || EXIT_STATUS=$?
done
exit $EXIT_STATUS
check-trailing-whitespaces:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Determine base commit
run: |
if [[ -n $GITHUB_BASE_REF ]]; then
# Make sure we have the latest version of the target branch
git fetch origin $GITHUB_BASE_REF
echo "base=origin/$GITHUB_BASE_REF" >> $GITHUB_ENV
else
echo "base=HEAD~1" >> $GITHUB_ENV
fi
- name: Check for trailing whitespaces and tabs
run: git diff --check $base HEAD --
|
# General Hammer Inputs
# Placement Constraints
#
vlsi.inputs.placement_constraints:
- path: "RocketTile"
type: toplevel
x: 0
y: 0
width: 10000
height: 10000
margins:
left: 0
right: 0
top: 0
bottom: 0
# Auto-floor-plan
par.innovus.floorplan_mode: "auto"
vlsi.core.max_threads: 32
# Hammer will auto-generate a CPF for simple power designs; see hammer/src/hammer-vlsi/defaults.yml for more info
vlsi.inputs.power_spec_mode: "auto"
vlsi.inputs.power_spec_type: "cpf"
# Specify clock signals
vlsi.inputs.clocks: [
{name: "clock", period: "1000ns", uncertainty: "0.1ns"}
]
# Power Straps
par.power_straps_mode: generate
par.generate_power_straps_method: by_tracks
par.blockage_spacing: 2.0
par.generate_power_straps_options:
by_tracks:
strap_layers:
- met2
- met3
- met4
- met5
pin_layers:
- met5
track_width: 6
track_width_met5: 1
track_spacing: 1
track_start: 10
track_start_met5: 1
power_utilization: 0.2
power_utilization_met4: 1
power_utilization_met5: 1
# Pin placement constraints
vlsi.inputs.pin_mode: generated
vlsi.inputs.pin.generate_mode: semi_auto
vlsi.inputs.pin.assignments: [
{pins: "*", layers: ["met2", "met4"], side: "bottom"}
]
# Generate Make include to aid in flow
vlsi.core.build_system: make
|
<reponame>iicarus-bit/google-ctf<gh_stars>0
apiVersion: "apps/v1"
kind: "Deployment"
metadata:
name: "chal"
spec:
template:
spec:
containers:
- name: "challenge"
livenessProbe:
failureThreshold: 2
httpGet:
path: /healthz
port: 8080
initialDelaySeconds: 45
timeoutSeconds: 3
periodSeconds: 30
readinessProbe:
httpGet:
path: /healthz
port: 8080
initialDelaySeconds: 5
timeoutSeconds: 3
periodSeconds: 5
- name: "healthcheck"
image: "healthcheck"
command:
resources:
limits:
cpu: "1"
requests:
cpu: "0.05"
volumeMounts:
- name: "healthcheck-secrets"
mountPath: "/secrets"
readOnly: true
- name: "healthcheck-exploit-key"
mountPath: "/keys"
readOnly: true
- name: "healthcheck-config"
mountPath: "/config"
readOnly: true
volumes:
- name: "healthcheck-secrets"
secret:
secretName: "healthcheck-secrets"
defaultMode: 0444
- name: "healthcheck-exploit-key"
secret:
secretName: "healthcheck-exploit-key"
defaultMode: 0444
- name: "healthcheck-config"
configMap:
name: "healthcheck-config"
|
cryoAsicGen1:
enable: True
ForceWrite: False
EpixHRGen1Cryo:
enable: True
PowerSupply:
enable: True
DigitalEn: True
AnalogEn: True
ProgPowerSupply:
enable: True
Vdd1: 0x8000
Vdd2: 0x8000
Vdd1_V: '2.500'
Vdd2_V: '2.500'
|
# This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
name: Linux Test
on:
push:
pull_request:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.8
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install dependencies
run: |
sudo apt install -y verilator libgmp-dev libmpfr-dev libmpc-dev
python -m pip install --upgrade pip
pip install flake8 pytest pytest-cov pytest-pycodestyle fault
pip install kratos # test optional dependency
pip install -e .
- name: Test with pytest
run: |
py.test --cov magma -v --cov-report term-missing tests
pycodestyle magma/
- name: Test with pytest
run: |
bash <(curl -s https://codecov.io/bash)
|
site_name: "<NAME> - Blog"
theme:
name: material
logo: assets/me.png
favicon: assets/icon.png
features:
- navigation.instant
- navigation.top
- search.suggest
- search.highlight
- search.share
markdown_extensions:
- footnotes
- toc:
permalink: true
extra:
homepage: https://joecrop.com
extra_css:
- css/site.css
- https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.2.0/styles/github.min.css
extra_javascript:
- https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.2.0/highlight.min.js
- https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.2.0/languages/javascript.min.js
- js/site.js
copyright: Copyright © 2022 <NAME>
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.