Search is not available for this dataset
content
stringlengths 0
376M
|
---|
sudo: required
language: python
python:
- "3.6"
addons:
apt:
update: true
before_install:
- pip install git+https://github.com/m-labs/nmigen
- pip install codecov
script:
- coverage run -m unittest discover
- codecov
notifications:
email:
on_failure: never
on_success: never
|
<filename>books/centaur/quicklisp/bundle/software/babel-20141217-git/.travis.yml
language: lisp
env:
matrix:
- LISP=abcl
- LISP=allegro
- LISP=sbcl
- LISP=sbcl32
- LISP=ccl
- LISP=ccl32
- LISP=clisp
- LISP=clisp32
- LISP=cmucl
- LISP=ecl
matrix:
allow_failures:
- env: LISP=cmucl
- env: LISP=ecl
install:
- curl -L https://github.com/luismbo/cl-travis/raw/master/install.sh | sh
- git clone --depth=1 git://github.com/trivial-features/trivial-features.git ~/lisp/trivial-features
- git clone git://common-lisp.net/projects/alexandria/alexandria.git ~/lisp/alexandria
script:
- cl -e '(ql:quickload :babel-tests)
(unless (babel-tests:run)
(uiop:quit 1))'
|
<filename>.circleci/config.yml
version: 2
jobs:
build:
docker:
- image: jeffbush001/nyuzi-build
steps:
- checkout
- run:
name: Compile and Test
command: scripts/run_ci_tests.sh
|
sudo: false
language: scala
scala:
- 2.12.4
jdk:
- oraclejdk8
branches:
only:
- master
cache:
directories:
- $HOME/.ivy2
addons:
postgresql: '9.5'
addons:
apt:
packages:
- language-pack-en
- language-pack-sv
addons:
chrome: stable
install:
- wget -N http://chromedriver.storage.googleapis.com/2.36/chromedriver_linux64.zip -P ~/
- unzip ~/chromedriver_linux64.zip -d ~/
- rm ~/chromedriver_linux64.zip
- sudo mv -f ~/chromedriver /usr/local/share/
- sudo chmod +x /usr/local/share/chromedriver
- sudo ln -s /usr/local/share/chromedriver /usr/local/bin/chromedriver
before_script:
- psql -c "create database signup;" -U postgres
- psql signup -c "create user signup4 password '<PASSWORD>+';" -U postgres
- psql signup -c "grant all privileges on database signup to signup4;" -U postgres
env: APPLICATION_BASE_URL=http://127.0.0.1:19001 LANG=sv_SE TZ=Europe/Stockholm
script:
- whereis google-chrome-stable
- whereis chromedriver
- sbt -Dwebdriver.chrome.driver=/usr/local/bin/chromedriver 'testOnly se.crisp.signup4.unit.* se.crisp.signup4.integration.* se.crisp.signup4.acceptance.*'
deploy:
provider: heroku
api_key:
secure: <KEY>
app: signup-ci-test
|
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
default_install_hook_types: [pre-commit, commit-msg]
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v3.2.0
hooks:
- id: trailing-whitespace
stages: [commit, manual ]
- id: end-of-file-fixer
stages: [ commit, manual ]
- id: check-yaml
stages: [ commit, manual ]
- id: check-added-large-files
stages: [ commit, manual ]
- id: check-merge-conflict
stages: [ commit, manual ]
- id: check-toml
stages: [ commit, manual ]
- id: check-vcs-permalinks
stages: [ commit, manual ]
- id: no-commit-to-branch
stages: [ commit, manual ]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.950
hooks:
- id: mypy
stages: [ manual ]
- repo: https://github.com/asottile/dead
rev: v1.5.0
hooks:
- id: dead
stages: [ manual ]
- repo: https://github.com/PyCQA/isort
rev: 5.10.1
hooks:
- id: isort
stages: [ commit, manual ]
- repo: https://github.com/psf/black
rev: 22.3.0
hooks:
- id: black
stages: [ commit, manual ]
- repo: https://github.com/alessandrojcm/commitlint-pre-commit-hook
rev: v8.0.0
hooks:
- id: commitlint
stages: [commit-msg, manual]
|
<reponame>cmarqu/pyVHDLParser
language: python
matrix:
include:
# Python 3.5
- env: CONFIG="Python3.5"
os: linux
sudo: false
python: "3.5"
install:
- pip3 install -r .Travis-CI/requirements.txt
before_script:
- ./.PyPI/PyPI.setup.sh
script:
- python3 ./TestSuite.py
deploy:
skip_cleanup: true
provider: releases
api_key:
secure: <KEY>
on:
tags: true
all_branches: true
|
---
- name: Hello World
hosts: localhost
tasks:
- name: Echo 'Hello World'
debug: # Default msg parameter is 'Hello World!'
|
file_list:
- impl1/source/top.vhd
- ../../common/utility/source/blink.vhd
- ../../common/utility/source/clk_div_n.vhd
- ../../common/utility/source/edge_detect.vhd
- ../../common/utility/source/pwm.vhd
- ../../common/utility/source/sdm.vhd
- ../../common/utility/source/quad_decoder.vhd
- ../../common/utility/source/delay_line.vhd
- ../../common/utility/source/level_filter.vhd
- ../../common/utility/source/step_generator.vhd
- ../../common/utility/sim/sim_on_percent.vhd
- ../../common/utility/sim/sim_edge_count.vhd
- ../../common/utility/sim/clk_div_n_tb.vhd
- ../../common/utility/sim/edge_detect_tb.vhd
- ../../common/utility/sim/pwm_tb.vhd
- ../../common/utility/sim/sdm_tb.vhd
- ../../common/utility/sim/quad_decoder_tb.vhd
- ../../common/utility/sim/delay_line_tb.vhd
- ../../common/utility/sim/level_filter_tb.vhd
- ../../common/utility/sim/step_generator_tb.vhd
- ../../common/devices/source/pwm_device.vhd
- ../../common/devices/source/sdm_device.vhd
- ../../common/devices/source/gpio_device.vhd
- ../../common/devices/source/drv8711_spi.vhd
- ../../common/devices/sim/pwm_device_tb.vhd
- ../../common/devices/sim/sdm_device_tb.vhd
- ../../common/devices/sim/gpio_device_tb.vhd
- ../../common/devices/sim/sim_step_counter.vhd
- ../../common/devices/sim/sim_drv8711_spi.vhd
- ../../common/devices/sim/sim_drv8711.vhd
- ../../common/devices/sim/drv8711_spi_tb.vhd
- ../../common/communications/source/spi_slave.vhd
- ../../common/communications/sim/sim_spi_master.vhd
- ../../common/communications/sim/spi_slave_tb.vhd
|
<reponame>zld012739/zldrepository<gh_stars>0
#Descriptions:
#CMD: cpld command
#A: Assitant Board
#T: Target Board
#FUNC0: the function of this pin connection
#DIRECTION: A2T T2A T2T
#FRDM A-B-C-D DEFINITION
#| NAME | SIZE | CPLD_SCH | FRDM_SCH
#| A | (10*2) | JJ_2 | ?
#| B | (8*2) | JJ_1 | ?
#| C | (5*2) | JJ_3 | ?
#| D | (8*2) | JJ_4 | ?
#SINGLE: default 0, if the pin header is single on FRDM-Board, this should be set 1
SINGLE: 0
SPI_SLAVE_FREERTOS:
IP: __SPI_SLAVE_FREERTOS
CMD: SPS
ALT: a
ss_sck_freertos:
PIN: JJ3_3
DIRECTION: IN
ss_mosi_freertos:
PIN: JJ3_7
DIRECTION: IN
ss_ssel_freertos:
PIN: JJ3_6
DIRECTION: IN
ss_miso_freertos:
PIN: JJ3_5
DIRECTION: OUT
spi_slave_result_freertos:
PIN: A35_K70
DIRECTION: OUT
|
resources:
repositories:
- repository: abs-tudelft
type: github
endpoint: github
name: abs-tudelft/azure-pipelines-templates
variables:
arrowVersion: 0.16.0
ocaccelVersion: master
ocseVersion: master
jobs:
- job: runtime
pool:
vmImage: ubuntu-latest
steps:
- checkout: self
submodules: true
- template: steps/install-fletcher.yml@abs-tudelft
- template: steps/install-oc-accel.yml@abs-tudelft
parameters:
ocaccelVersion: $(ocaccelVersion)
ocseVersion: $(ocseVersion)
- template: steps/install-arrow.yml@abs-tudelft
parameters:
version: $(arrowVersion)
- template: steps/install-cmake.yml@abs-tudelft
- template: steps/cmake.yml@abs-tudelft
parameters:
source: runtime
- job: sum_sw
pool:
vmImage: ubuntu-latest
steps:
- checkout: self
submodules: true
- template: steps/install-fletcher.yml@abs-tudelft
- template: steps/install-oc-accel.yml@abs-tudelft
parameters:
ocaccelVersion: $(ocaccelVersion)
ocseVersion: $(ocseVersion)
- template: steps/install-arrow.yml@abs-tudelft
parameters:
version: $(arrowVersion)
- template: steps/install-cmake.yml@abs-tudelft
- script: |
make -C examples/sum
displayName: Build
- job: stringwrite_sw
pool:
vmImage: ubuntu-latest
steps:
- checkout: self
submodules: true
- template: steps/install-fletcher.yml@abs-tudelft
- template: steps/install-oc-accel.yml@abs-tudelft
parameters:
ocaccelVersion: $(ocaccelVersion)
ocseVersion: $(ocseVersion)
- template: steps/install-arrow.yml@abs-tudelft
parameters:
version: $(arrowVersion)
- template: steps/install-cmake.yml@abs-tudelft
- script: |
make -C examples/stringwrite
displayName: Build
|
python-targets:
- 3
test-warnings: no
strictness: medium
max-line-length: 180
pylint:
disable:
- too-many-arguments
pep8:
disable:
- E704
options:
max-line-length: 180
mccabe:
options:
max-complexity: 20
ignore-paths:
- docs
- vhdl
|
<reponame>popas90/vhdl-parser<gh_stars>1-10
sudo: false
language: python
python:
- "3.5"
##Install dependencies
install:
- pip install -r requirements.txt
##Run tests
script:
- ./setup.sh --local && ./build.sh --local && ./validate.sh
notifications:
email:
on_success: never
on_failure: never
|
---
category:
- Recursion
- Memoization
- Classic CS problems and programs
- Arithmetic
- Simple
note: Arithmetic operations
|
<reponame>parzival3/Surelog
variables:
VSIM: vsim-10.6b -64
VLIB: vlib-10.6b
VLOG: vlog-10.6b -64
SYNOPSYS_DC: synopsys-2016.03 dc_shell -64bit
before_script:
- export PATH=~fschuiki/.cargo/bin:$PATH
- mkdir -p build
modelsim:
script:
- bender vsim
- cd build
- ../scripts/compile_vsim.sh
- ../scripts/run_vsim.sh
synopsys:
script:
- cd build
- ../scripts/synth.sh
|
<reponame>Ameln93/cv32e40p<filename>src_files.yml
cv32e40p_regfile_rtl:
targets: [
rtl,
tsmc55,
gf22,
]
incdirs: [
./rtl/include,
]
files: [
./rtl/cv32e40p_register_file_test_wrap.sv,
./rtl/cv32e40p_register_file_latch.sv,
]
riscv:
vlog_opts: [
-L fpnew_lib,
]
incdirs: [
./rtl/include,
../../rtl/includes,
]
files: [
./rtl/include/cv32e40p_apu_core_package.sv,
./rtl/include/cv32e40p_defines.sv,
./rtl/include/cv32e40p_tracer_defines.sv,
./rtl/cv32e40p_alu.sv,
./rtl/cv32e40p_alu_div.sv,
./rtl/cv32e40p_ff_one.sv,
./rtl/cv32e40p_popcnt.sv,
./rtl/cv32e40p_compressed_decoder.sv,
./rtl/cv32e40p_controller.sv,
./rtl/cv32e40p_cs_registers.sv,
./rtl/cv32e40p_decoder.sv,
./rtl/cv32e40p_int_controller.sv,
./rtl/cv32e40p_ex_stage.sv,
./rtl/cv32e40p_hwloop_controller.sv,
./rtl/cv32e40p_hwloop_regs.sv,
./rtl/cv32e40p_id_stage.sv,
./rtl/cv32e40p_if_stage.sv,
./rtl/cv32e40p_load_store_unit.sv,
./rtl/cv32e40p_mult.sv,
./rtl/cv32e40p_prefetch_buffer.sv,
./rtl/cv32e40p_obi_interface.sv
./rtl/cv32e40p_prefetch_controller.sv
./rtl/cv32e40p_sleep_unit.sv
./rtl/cv32e40p_core.sv,
./rtl/cv32e40p_apu_disp.sv,
./rtl/cv32e40p_fetch_fifo.sv,
./rtl/cv32e40p_pmp.sv,
]
cv32e40p_vip_rtl:
targets: [
rtl,
]
incdirs: [
./rtl/include,
]
files: [
./rtl/cv32e40p_tracer.sv,
./rtl/cv32e40p_sim_clock_gate.sv,
]
flags: [
skip_synthesis,
]
cv32e40p_regfile_rtl:
targets: [
rtl,
tsmc55,
gf22,
]
incdirs: [
./rtl/include,
]
files: [
./rtl/cv32e40p_register_file_test_wrap.sv,
./rtl/cv32e40p_register_file_latch.sv,
]
cv32e40p_regfile_verilator:
targets: [
verilator,
]
files: [
./rtl/cv32e40p_register_file_ff.sv,
]
cv32e40p_regfile_fpga:
targets: [
xilinx,
]
incdirs: [
./rtl/include,
]
files: [
./rtl/cv32e40p_register_file_test_wrap.sv,
./rtl/cv32e40p_register_file_ff.sv,
]
tb_riscv:
sim_tools: [
questa
]
synth_tools: [
mentor
]
targets: [
rtl,
]
flags: [
skip_synthesis,
]
incdirs: [
tb/tb_riscv/include,
rtl/include,
]
files: [
tb/tb_riscv/include/perturbation_defines.sv,
tb/tb_riscv/riscv_simchecker.sv,
tb/tb_riscv/tb_riscv_core.sv,
tb/tb_riscv/riscv_perturbation.sv,
tb/tb_riscv/riscv_random_interrupt_generator.sv,
tb/tb_riscv/riscv_random_stall.sv,
]
|
<reponame>Cloud-V/Fault<filename>.github/workflows/ci.yml
on:
push:
tags:
- "*"
name: CI
jobs:
build_image:
name: "Build"
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Get Tag
if: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }}
run: |
echo TAG="$(ruby -e "print '$GITHUB_REF'.split('/')[2]")" >> $GITHUB_ENV
- name: Build Images
run: |
docker build -t cloud-v/fault:latest .
- name: Log in to the Container registry
uses: docker/login-action@f<PASSWORD>39<PASSWORD>
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.<PASSWORD> }}
- name: Push to GHCR
run: |
docker image tag cloud-v/fault:latest ghcr.io/cloud-v/fault:latest
docker image tag cloud-v/fault:latest ghcr.io/cloud-v/fault:$TAG
docker push ghcr.io/cloud-v/fault:$TAG
docker push ghcr.io/cloud-v/fault:latest
|
# Details: https://github.com/actions/labeler/blob/master/README.md
host_net:
- host/net/*
host:
- host/*
monitor:
- monitor/*
board_fpga:
- board/fpga/*
board_soc:
- board/soc/*
|
<reponame>Purdue-SoCET/RISCVBusiness
# ISA Configurations
isa_params:
xlen : 32
# Microarchitectural Configurations
microarch_params:
# Branch/Jump Configurations
br_predictor_type : "not_taken"
# Cache configurations
cache_config : "separate"
dcache_type : "pass_through"
icache_type : "pass_through"
# Bus configurations
bus_endianness : "big"
bus_interface_type : "generic_bus_if"
# Sparisty Optimizations
sparce_enabled : "enabled"
# RV32C
rv32c_enabled : "disabled"
# Halt
infinite_loop_halts : "true"
# RISC-MGMT Extension Configuration
risc_mgmt_params:
standard_extensions:
nonstandard_extensions:
|
#sudo: required
language: bash
git:
depth: false
quiet: true
branches:
only:
- master
services:
- docker
# maybe add a stage with a trivial test
env:
matrix:
- BUILD_MODE=opt DOCKER=mascucsc/archlinux-masc DOCKER_LGRAPH_SRC=/root/livehd LGRAPH_COMPILER=g++
- BUILD_MODE=dbg DOCKER=mascucsc/archlinux-masc DOCKER_LGRAPH_SRC=/root/livehd LGRAPH_COMPILER=clang++
- BUILD_MODE=fastbuild DOCKER=mascucsc/kaliriscv-masc DOCKER_LGRAPH_SRC=/root/livehd LGRAPH_COMPILER=g++
- BUILD_MODE=dbg DOCKER=mascucsc/ubuntu-masc DOCKER_LGRAPH_SRC=/root/livehd LGRAPH_COMPILER=g++-8
before_install:
- docker pull $DOCKER
script:
- scripts/run-docker-test.sh `pwd` $BUILD_MODE $DOCKER $DOCKER_LGRAPH_SRC $LGRAPH_COMPILER
# broken bazel (not livehd) gcc interaction
# - BUILD_MODE=fastbuild DOCKER=mascucsc/alpine-masc DOCKER_LGRAPH_SRC=/home/user/livehd LGRAPH_COMPILER=g++
|
machine:
pre:
- wget -O ghdl.deb https://github.com/tgingold/ghdl/releases/download/v0.33/ghdl_0.33-1ubuntu1_amd64.deb
- sudo dpkg -i ghdl.deb || true
- sudo apt-get install -f
test:
override:
- ghdl -i --work=LFSR src/LFSR/*.vhd
- ghdl -i --work=JUNIT_TB test/JUNIT_TB/*.vhd
- ghdl -i --work=LFSR_TB test/LFSR_TB/*.vhd
- ghdl -m --work=LFSR_TB main_tb | tee $CIRCLE_ARTIFACTS/make.log
- ghdl -r main_tb --disp-tree=proc --wave=$CIRCLE_ARTIFACTS/main_tb.ghw 2>&1 | tee $CIRCLE_ARTIFACTS/sim.log
- mkdir $CIRCLE_TEST_REPORTS/ghdl
- cp main_tb_junit.xml $CIRCLE_TEST_REPORTS/ghdl/junit.xml
|
on: [push, pull_request]
name: CI
jobs:
lints:
name: Lints
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v2
- name: Install Rust toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
components: rustfmt, clippy
- name: Run rustfmt
uses: actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
- name: Run clippy
uses: actions-rs/cargo@v1
continue-on-error: true # TODO: Remove once we have fixed them all
with:
command: clippy
check:
name: Checks
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v2
with:
submodules: recursive
- name: Install Rust toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
components: rustfmt, clippy
- name: Configure Environment
id: env
run: |
echo "CIRCT_SYS_CIRCT_DIR=$GITHUB_WORKSPACE/circt" >> $GITHUB_ENV
echo "::set-output name=circt-hash::$(git rev-parse @:./circt)"
echo "::set-output name=llvm-hash::$(cd circt && git rev-parse @:./llvm)"
# Fetch LLVM from the cache if possible, otherwise rebuild it.
- name: Fetch LLVM from cache
id: cache-llvm
uses: martijnhols/actions-cache/restore@v3
with:
path: circt/llvm/build
key: ${{ runner.os }}-llvm-${{ hashFiles('.github/build-llvm.sh') }}-${{ steps.env.outputs.llvm-hash }}
- name: Rebuild LLVM
if: steps.cache-llvm.outputs.cache-hit != 'true'
run: .github/build-llvm.sh
- name: Upload LLVM to cache
if: steps.cache-llvm.outputs.cache-hit != 'true'
uses: martijnhols/actions-cache/save@v3
with:
path: circt/llvm/build
key: ${{ steps.cache-llvm.outputs.primary-key }}
# Fetch CIRCT from the cache if possible, otherwise rebuild it.
- name: Fetch CIRCT from cache
id: cache-circt
uses: martijnhols/actions-cache/restore@v3
with:
path: circt/build
key: ${{ runner.os }}-circt-${{ hashFiles('.github/build-circt.sh') }}-${{ steps.env.outputs.circt-hash }}
- name: Rebuild CIRCT
if: steps.cache-circt.outputs.cache-hit != 'true'
run: .github/build-circt.sh
- name: Upload CIRCT to cache
if: steps.cache-circt.outputs.cache-hit != 'true'
uses: martijnhols/actions-cache/save@v3
with:
path: circt/build
key: ${{ steps.cache-circt.outputs.primary-key }}
# Run the quick check build.
- name: Run "cargo check"
uses: actions-rs/cargo@v1
with:
command: check
args: --all
# Run the regular build.
- name: Run "cargo build"
uses: actions-rs/cargo@v1
with:
command: build
# Run the unit tests.
- name: Run "cargo test"
uses: actions-rs/cargo@v1
with:
command: test
args: --all
# Run the tests.
- name: Run lit tests
run: circt/llvm/build/bin/llvm-lit test -v
- name: Run regression tests (old)
run: env TERM=xterm-color test/run.sh
- name: Run regression tests (new)
run: python3 scripts/test.py --debug --verbose
|
<filename>.github/workflows/python-app.yml<gh_stars>10-100
# This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
name: avr (single test)
on:
workflow_dispatch:
inputs:
timeout:
description: timeout (in seconds)
default: 300
required: true
file:
description: Benchmark file
default: "examples/btor2/counter.btor2"
required: true
name:
description: Benchmark name
default: "test"
required: true
args:
description: Additional arguments
default: ""
required: false
jobs:
job1:
runs-on: ubuntu-16.04
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.7
uses: actions/setup-python@v2
with:
python-version: 3.7
- name: Install Yosys
run: |
sudo add-apt-repository -y ppa:saltmakrell/ppa
sudo apt-get update -q
sudo apt-get install yosys
if: endsWith(github.event.inputs.file, '.v')
- name: Run file ${{ github.event.inputs.file }} with args ${{ github.event.inputs.args }}
run: |
ENVTIMEOUT=${{ github.event.inputs.timeout }} ./ci/deploy_bm.sh ${{ github.event.inputs.file }} ${{ github.event.inputs.name }} ${{ github.event.inputs.args }}
- name: Upload avr stats
uses: actions/upload-artifact@v2
with:
name: stats.txt
path: output/work_${{ github.event.inputs.name }}/${{ github.event.inputs.name }}.results
if: ${{ always() }}
|
<reponame>slaclab/lcls-timing-core
##############################################################################
## This file is part of 'LCLS Timing Core'.
## It is subject to the license terms in the LICENSE.txt file found in the
## top-level directory of this distribution and at:
## https://confluence.slac.stanford.edu/display/ppareg/LICENSE.html.
## No part of 'LCLS Timing Core', including this file,
## may be copied, modified, propagated, or distributed except according to
## the terms contained in the LICENSE.txt file.
##############################################################################
#schemaversion 3.0.0
#once EvrV2CoreTriggers.yaml
#include EvrV2ChannelReg.yaml
#include EvrV2TriggerReg.yaml
## Refer to https://confluence.slac.stanford.edu/download/attachments/216713616/ConfigEvrV2CoreTriggersYaml.pdf
EvrV2CoreTriggers: &EvrV2CoreTriggers
class: MMIODev
configPrio: 1
description: Core trigger control
size: 0x40000
metadata:
numTriggers: &numTriggers 16
############
children:
############
#########################################################
EvrV2ChannelReg:
<<: *EvrV2ChannelReg
at:
offset: 0x0000
nelms: *numTriggers
stride: 0x100
#########################################################
EvrV2TriggerReg:
<<: *EvrV2TriggerReg
at:
offset: 0x1000
nelms: *numTriggers
stride: 0x100
#########################################################
|
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
name: future
dependencies:
axi: {path: ../../vendor/pulp_platform_axi}
common_cells: {path: ../../vendor/pulp_platform_common_cells}
sources:
# Level 0:
- src/axi_to_mem.sv
- src/axi_to_mem_interleaved.sv
- src/mem_to_axi_lite.sv
- src/idma_reg64_frontend_reg_pkg.sv
- src/idma_tf_id_gen.sv
- src/dma/axi_dma_data_path.sv
- src/axi_interleaved_xbar.sv
# Level 1:
- src/axi_xp.sv
- src/axi_zero_mem.sv
- src/idma_reg64_frontend_reg_top.sv
# Level 2:
- src/idma_reg64_frontend.sv
- src/dma/axi_dma_data_mover.sv
- src/dma/axi_dma_burst_reshaper.sv
# Level 2
- src/dma/axi_dma_backend.sv
- target: test
files:
- test/fixture_axi_dma_backend.sv
- test/tb_axi_dma_backend.sv
|
name: 'examples'
on:
push:
jobs:
examples:
name: 'Running examples'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: true
fetch-depth: 0
- name: Pull container images
run: |
docker pull hdlc/sim:osvb
docker pull hdlc/ghdl:yosys
docker pull hdlc/nextpnr:ice40
docker pull hdlc/icestorm
- run: cd examples/cocotb; bash run.sh
- run: make -C examples/ghdl sim
- run: make -C examples/ghdl syn
- run: make -C examples/impl
- run: make -C examples/verilator
|
name: coverage
on:
push:
branches:
- master
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
- name: Generate coverage report
run: |
pip install pytest
pip install coverage
coverage run -m pytest
coverage xml
- name: Upload coverage to codecov
uses: codecov/codecov-action@v1
with:
token: ${{ secrets.CODECOV_TOKEN }}
file: ./coverage.xml
fail_ci_if_error: true
|
module: test_fifo_singleclock_standard
sources:
- ../verilog/fifo_singleclock_standard.sv
toplevel: fifo_singleclock_standard
simulators:
- vcs
parameters:
WIDTH: 16
DEPTH: 32
|
extends: default
page:
margin: [0.8in, 0.67in, 0.75in, 0.67in]
link:
font-color: #edac00
image:
align: center
caption:
align: center
running-content:
start-at: toc
header:
height: 0.65in
vertical-align: bottom
image-vertical-align: bottom
font-size: 11
border-color: #000000
border-width: 1
recto:
left:
content: '*The NEORV32 RISC-V Processor*'
right:
content: '*Visit on https://github.com/stnolting/neorv32[GitHub]*'
verso:
left:
content: '*The NEORV32 Processor*'
right:
content: '*Visit on https://github.com/stnolting/neorv32[GitHub]*'
footer:
start-at: toc
height: 0.75in
font-size: 10
border-color: #000000
border-width: 1
recto:
left:
content: '{page-number} / {page-count}'
center:
content: 'Copyright (c) 2021, <NAME>. All rights reserved.'
right:
content: '{docdate}'
verso:
left:
content: '{page-number} / {page-count}'
center:
content: 'NEORV32 Version: {revnumber}'
right:
content: '{docdate}'
|
---
input_file : ../akane/04_sorting_network.akd
output_file : 04_sorting_network.md
image_url :
"Fig.1 コンパレーター (昇順)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/55ce3859-6d0e-7fa8-979b-d0a9a9d75e76.jpeg"
"Fig.2 コンパレーター (降順)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/81fa2c1c-8ad8-14f2-168c-82a792eee608.jpeg"
"Fig.3 ソーティングネットワークの動作例 (4入力昇順)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/56daa7e6-213d-5749-c449-c2dff8ae466b.jpeg"
"Fig.4 ソーティングネットワークのステージ" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/3b6647ba-fdd2-6175-9dd3-4e53b3eff008.jpeg"
"Fig.5 ソーティングネットワーク構成の VHDL 記述例" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/6ceda746-e04c-73b9-fb13-2655a2d73dff.jpeg"
"Fig.6 ソーティングネットワーク構成の VHDL 記述例 (Stage 1)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/3508aa47-2064-e816-a77a-382dae6db754.jpeg"
"Fig.7 ソーティングネットワーク構成の VHDL 記述例 (Stage 2)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/5ffcd9e0-891c-0d3b-4c0e-4b34d87beb43.jpeg"
"Fig.8 ソーティングネットワーク構成の VHDL 記述例 (Stage 3)" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/fb97e8a2-ffb2-6cd9-27d4-568b9d6dc51b.jpeg"
"Fig.9 ソーティングネットワークコアの構成" : "https://qiita-image-store.s3.ap-northeast-1.amazonaws.com/0/24981/936cd6bb-b10a-8681-b547-e33e18669f85.jpeg"
link_list :
- id : "「はじめに」"
title: "「VHDL で書くマージソーター(はじめに)」@Qiita"
url : "https://qiita.com/ikwzm/items/6665b2ef44d878a5b85f"
- id : "「ワードの定義」"
title: "「VHDL で書くマージソーター(ワードの定義)」@Qiita"
url : "https://qiita.com/ikwzm/items/bdcd8876317b908ff492"
- id : "「ワード比較器」"
title: "「VHDL で書くマージソーター(ワード比較器)」@Qiita"
url : "https://qiita.com/ikwzm/items/d5d1dd264b1670f33bd7"
- id : "「ソーティングネットワーク」"
title: "「VHDL で書くマージソーター(ソーティングネットワーク)」@Qiita"
url : "https://qiita.com/ikwzm/items/a1d06e47523759c726ae"
- id : "「バイトニックマージソート」"
title: "「VHDL で書くマージソーター(バイトニックマージソート)」@Qiita"
url : "https://qiita.com/ikwzm/items/366eacbf6a877994c955"
- id : "「バッチャー奇偶マージソート」"
title: "「VHDL で書くマージソーター(バッチャー奇偶マージソート)」@Qiita"
url : "https://qiita.com/ikwzm/items/c21a53f21b87408a7805"
- id : "「シングルワード マージソート ノード」"
title: "「VHDL で書くマージソーター(シングルワード マージソート ノード)」@Qiita"
url : "https://qiita.com/ikwzm/items/7fd7ef9ffc4d9b314fee"
- id : "「マルチワード マージソート ノード」"
title: "「VHDL で書くマージソーター(マルチワード マージソート ノード)」@Qiita"
url : "https://qiita.com/ikwzm/items/ed96b7a44b83bcee4ba5"
- id : "「マージソート ツリー」"
title: "「VHDL で書くマージソーター(マージソート ツリー)」@Qiita"
url : "https://qiita.com/ikwzm/items/1f76ae5cda95aaf92501"
- id : "「端数ワード処理」"
title: "「VHDL で書くマージソーター(端数ワード処理)」@Qiita"
url : "https://qiita.com/ikwzm/items/6b15340f1e05ef03f8d0"
- id : "「ストリーム入力」"
title: "「VHDL で書くマージソーター(ストリーム入力)」@Qiita"
url : "https://qiita.com/ikwzm/items/56e22511021a082a2ccd"
- id : "「ストリームフィードバック」"
title: "「VHDL で書くマージソーター(ストリームフィードバック)」@Qiita"
url : "https://qiita.com/ikwzm/items/e8c59c0ec92956c9355f"
- id : "「ArgSort IP」"
title: "「VHDL で書くマージソーター(ArgSort IP)」@Qiita"
url : "https://qiita.com/ikwzm/items/89fc9542492fca74c9e3"
- id : "「ArgSort-Ultra96」"
title: "「VHDL で書くマージソーター(ArgSort-Ultra96)」@Qiita"
url : "https://qiita.com/ikwzm/items/d58c9b77d038e23ac792"
- id : "「ArgSort-Kv260」"
title: "「VHDL で書くマージソーター(ArgSort-Kv260)」@Qiita"
url : "https://qiita.com/ikwzm/items/ec0f779534c44b35334a"
seg_level : -1
---
|
<filename>fpga/style_rules.yaml
rule:
global:
indentSize: 4
case: 'lower'
library_003:
disable: true # Allow doxygen comment before LIBRARY
library_004:
case: 'upper' # Upper-case LIBRARY keyword
library_005:
case: 'upper' # Upper-case USE keyword
library_008:
indentSize: 0 # Align USE with LIBRARY
library_009:
indentSize: 0 # Align USE comment with LIBRARY
entity_003:
disable: true # Allow doxygen comment before ENTITY
entity_004:
case: 'upper' # Upper-case ENTITY keyword
entity_006:
case: 'upper' # Upper-case IS keyword
entity_010:
case: 'upper' # Upper-case END keyword
entity_014:
case: 'upper' # Upper-case ENTITY keyword
generic_009:
case: 'upper' # Upper-case GENERIC keyword
architecture_003:
disable: true # Allow doxygen comment before architecture
architecture_004:
case: 'upper' # Upper-case ARCHITECTURE keyword
architecture_009:
case: 'upper' # Upper-case END keyword
architecture_019:
case: 'upper' # Upper-case OF keyword
architecture_020:
case: 'upper' # Upper-case IS keyword
architecture_021:
case: 'upper' # Upper-case BEGIN keyword
architecture_028:
case: 'upper' # Upper-case ARCHITECTURE keyword
type_002:
case: 'upper' # Upper-case TYPE keyword
type_010:
disable: true # Allow doxygen comment before TYPE
type_013:
case: 'upper' # Upper-case IS keyword
constant_002:
case: 'upper' # Upper-case CONSTANT keyword
constant_012:
disable: true # Work around bug in VSG 2.0.0
component_003:
disable: true # Allow comment before component
component_004:
case: 'upper' # Upper-case COMPONENT keyword
component_006:
case: 'upper' # Upper-case IS keyword
component_010:
case: 'upper' # Upper-case END keyword
component_014:
case: 'upper' # Upper-case COMPONENT keyword
instantiation_006:
case: 'upper' # Upper-case PORT MAP keyword
instantiation_013:
case: 'upper' # Upper-case GENERIC MAP keyword
instantiation_027:
case: 'upper' # Upper-case ENTITY keyword
generate_004:
disable: true # Allow comment before component
generate_009:
case: 'upper' # Upper-case END keyword
generate_010:
case: 'upper' # Upper-case GENERATE keyword
signal_002:
case: 'upper' # Upper-case SIGNAL keyword
function_004:
case: 'upper' # Upper-case BEGIN keyword
function_005:
case: 'upper' # Upper-case FUNCTION keyword
function_006:
disable: true # Allow comment before function
function_013:
case: 'upper' # Upper-case END keyword
function_014:
case: 'upper' # Upper-case FUNCTION keyword
process_004:
case: 'upper' # Upper-case BEGIN keyword
process_005:
case: 'upper' # Upper-case PROCESS keyword
process_008:
case: 'upper' # Upper-case END keyword
process_009:
case: 'upper' # Upper-case PROCESS keyword
process_013:
case: 'upper' # Upper-case IS keyword
process_029:
clock: 'edge'
process_036:
disable: false
prefixes: [ 'pr_' ]
if_025:
case: 'upper' # Upper-case IF keyword
if_026:
case: 'upper' # Upper-case ELSIF keyword
if_027:
case: 'upper' # Upper-case ELSE keyword
if_028:
case: 'upper' # Upper-case END keyword
if_029:
case: 'upper' # Upper-case THEN keyword
if_034:
case: 'upper' # Upper-case IF keyword
case_014:
case: 'upper' # Upper-case CASE keyword
case_015:
case: 'upper' # Upper-case IS keyword
case_016:
case: 'upper' # Upper-case WHEN keyword
case_017:
case: 'upper' # Upper-case END keyword
case_018:
case: 'upper' # Upper-case CASE keyword
range_001:
case: 'upper' # Upper-case DOWNTO keyword
range_002:
case: 'upper' # Upper-case TO keyword
variable_002:
case: 'upper' # Upper-case VARIABLE keyword
variable_012:
disable: false
prefixes: [ 'v_' ]
port_017:
case: 'upper' # Upper-case PORT keyword
port_019:
case: 'upper' # Upper-case PORT direction keywords (IN/OUT/INOUT)
|
<gh_stars>1-10
# Ignore coverage for http.go and resolver.go which don't have tests.
ignore:
- "http.go"
- "resolver.go"
|
<filename>.github/workflows/gh-pages.yml
# Generate doxygen-based documentation and deploy it to neorv32 github-pages
name: 'Deploy SW Framework Documentation to GitHub-Pages'
on:
push:
branches:
- master
paths:
- 'sw/**'
pull_request:
branches:
- master
paths:
- 'sw/**'
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
name: '🐧 Ubuntu-Latest'
# Steps represent a sequence of tasks that will be executed as part of the job
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v2
- name: '🛠️ Modifying Doxyfile'
run: |
ls -al ./docs
sed -i 's/$(PWD)\/../$(GITHUB_WORKSPACE)/g' ./docs/Doxyfile
- name: '📚 Generate Doxygen Documentation'
uses: mattnotmitt/[email protected]
with:
working-directory: '.'
doxyfile-path: 'docs/Doxyfile'
- name: '🚀 Deploy to GitHub-Pages'
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./doxygen_build/html
|
<gh_stars>0
format_version: v1.0
type: UniqueComponent
attributes: {id: spi1}
children:
- type: Symbols
children:
- type: Integer
attributes: {id: SPI_BRG_VALUE}
children:
- type: Values
children:
- type: Dynamic
attributes: {id: spi1, value: '23'}
- type: KeyValueSet
attributes: {id: SPI_SPICON_MSSEN}
children:
- type: Values
children:
- type: User
attributes: {value: '1'}
- type: ElementPosition
attributes: {x: '51', y: '198', id: spi1}
|
module: test_noc_buffer
sources:
- ../verilog/noc_buffer.sv
toplevel: noc_buffer
simulators:
- vcs
parameters:
FLIT_WIDTH: [8,32]
DEPTH: [2,4,16]
parameter_includes:
- FLIT_WIDTH: 32
DEPTH: 8
# Test case to test the FULLPACKET feature. The test is hardcoded and only
# works with these parameters.
- FLIT_WIDTH: 8
DEPTH: 4
FULLPACKET: 1
|
language: c
install: wget https://raw.githubusercontent.com/ocaml/ocaml-travisci-skeleton/master/.travis-opam.sh
script: bash -ex .travis-opam.sh
env:
- PACKAGE="tls" OCAML_VERSION=4.01 EXTRA_REMOTES=https://github.com/mirage/mirage-dev.git PINS="nocrypto asn1-combinators x509"
- PACKAGE="tls" OCAML_VERSION=latest DEPOPTS=lwt EXTRA_REMOTES=https://github.com/mirage/mirage-dev.git PINS="nocrypto asn1-combinators x509"
- PACKAGE="tls" OCAML_VERSION=latest UPDATE_GCC_BINUTILS=1 DEPOPTS=mirage-types-lwt EXTRA_REMOTES=https://github.com/mirage/mirage-dev.git PINS="nocrypto asn1-combinators x509" POST_INSTALL_HOOK="./.travis-test-mirage.sh"
notifications:
email: false
|
package:
name: lowrisc_i2c
description: "lowRISC I2C"
authors: ["lowRISC Contributors"]
dependencies:
lowrisc_prim: {path: ../prim}
sources:
- rtl/i2c_reg_pkg.sv
- rtl/i2c_reg_top.sv
- rtl/i2c_fsm.sv
- rtl/i2c_core.sv
- rtl/i2c.sv
|
---
name: loopback
board: boards/alpha250
version: 0.2.0
cores:
- fpga/cores/axi_ctl_register_v1_0
- fpga/cores/axi_sts_register_v1_0
- fpga/cores/dna_reader_v1_0
- fpga/cores/edge_detector_v1_0
- fpga/cores/comparator_v1_0
- fpga/cores/unrandomizer_v1_0
- boards/alpha250/cores/precision_dac_v1_0
- boards/alpha250/cores/spi_cfg_v1_0
memory:
- name: control
offset: '0x60000000'
range: 4K
- name: ps_control
offset: '0x64000000'
range: 4K
- name: status
offset: '0x50000000'
range: 4K
- name: ps_status
offset: '0x54000000'
range: 4K
- name: xadc
offset: '0x43C00000'
range: 64K
control_registers:
- mmcm
- precision_dac_ctl
- precision_dac_data[2]
status_registers:
- adc[n_adc]
ps_control_registers:
- spi_cfg_data
- spi_cfg_cmd
ps_status_registers:
- spi_cfg_sts
parameters:
fclk0: 200000000 # FPGA clock speed in Hz
adc_clk: 250000000
n_adc: 2
xdc:
- boards/alpha250/config/ports.xdc
drivers:
- boards/alpha250/drivers/common.hpp
- boards/alpha250/drivers/eeprom.hpp
- boards/alpha250/drivers/gpio-expander.hpp
- boards/alpha250/drivers/temperature-sensor.hpp
- boards/alpha250/drivers/power-monitor.hpp
- boards/alpha250/drivers/clock-generator.hpp
- boards/alpha250/drivers/ltc2157.hpp
- boards/alpha250/drivers/ad9747.hpp
- boards/alpha250/drivers/precision-adc.hpp
- boards/alpha250/drivers/precision-dac.hpp
- boards/alpha250/drivers/spi-config.hpp
web:
- web/index.html
- web/main.css
- web/koheron.ts
|
title: R. Sameni Homepage
author: Copyright 2020, <NAME>
description: > # this means to ignore newlines until "show_excerpts:"
www.sameni.info
show_excerpts: false # set to true to show excerpts on the homepage
# Minima date format
# refer to https://shopify.github.io/liquid/filters/date/ if you want to customize this
minima:
date_format: "%b %-d, %Y"
# generate social links in footer
social_links:
# twitter: jekyllrb
github: rsameni
# devto: jekyll
# dribbble: jekyll
# facebook: jekyll
# flickr: jekyll
# instagram: jekyll
# linkedin: jekyll
# pinterest: jekyll
# youtube: jekyll
# youtube_channel: UC8CXR0-3I70i1tfPg1PAE1g
# youtube_channel_name: CloudCannon
# telegram: jekyll
# googleplus: +jekyll
# microdotblog: jekyll
# keybase: jekyll
# Mastodon instances
# mastodon:
# - username: jekyll
# instance: example.com
# - username: jekyll2
# instance: example.com
# GitLab instances
# gitlab:
# - username: jekyll
# instance: example.com
# - username: jekyll2
# instance: example.com
# Build settings
theme: minima
plugins:
- jekyll-feed
- jekyll-seo-tag
includes_dir:
Research/Publications
# If you want to link only specific pages in your header, uncomment
# this and add the path to the pages in order as they should show up
#header_pages:
# - about.md
header_pages:
- Teaching.md
- Projects.md
- Publications.md
- ResearchPool.md
- Team.md
- Supervisions.md
google_analytics: UA-2117082-4
#github:
#  is_project_page: false
# github:
# is_project_page: false
#theme: jekyll-theme-minima
#theme: jekyll-theme-slate
#   # space character to get empty title
#footnote_backlink: ''
|
<filename>models/AI-Model-Zoo/model-list/cf_reid_market1501_160_80_0.95G_1.4/model.yaml
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: 'Person re-identification model (backbone: resnet18).'
input size: 160*80
float ops: 0.95G
task: person reid
framework: caffe
prune: 'no'
version: 1.4
files:
- name: cf_reid_market1501_160_80_0.95G_1.4
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=cf_reid_market1501_160_80_0.95G_1.4.zip
checksum: 9ae540a8451db0aaba8c3a8b8d62df01
- name: reid
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=reid-zcu102_zcu104_kv260-r1.4.0.tar.gz
checksum: 57a5ecab56f220e494eb80a1aa683da3
- name: reid
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=reid-vck190-r1.4.0.tar.gz
checksum: 42c181f4563b9b9ecdbc1432a125522b
- name: reid
type: xmodel
board: vck5000
download link: https://www.xilinx.com/bin/public/openDownload?filename=reid-vck5000-DPUCVDX8H-r1.4.0.tar.gz
checksum: 44827cfa86fe23120dc0d3be81661fb0
- name: reid
type: xmodel
board: u50-DPUCAHX8H & u50lv-DPUCAHX8H & u280-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=reid-u50-u50lv-u280-DPUCAHX8H-r1.4.0.tar.gz
checksum: 1bca6fdc14532ec9dc3394526c626940
- name: reid
type: xmodel
board: u50-DPUCAHX8L & u50lv-DPUCAHX8L & u280-DPUCAHX8L
download link: https://www.xilinx.com/bin/public/openDownload?filename=reid-u50-u50lv-u280-DPUCAHX8L-r1.4.0.tar.gz
checksum: 66fbc390e7780ed95c10713da37bfc45
- name: reid
type: xmodel
board: u200-DPUCADF8H & u250-DPUCADF8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=reid-u200-u250-r1.4.0.tar.gz
checksum: 06c59b27e20ff3e36b6f2cca3813c643
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
<reponame>scale-lab/PVTsensors
#
# List of IPs and relative branch/commit-hash/tag.
# Uses the YAML syntax.
#
# Examples:
#
# or10n:
# commit: tags/PULP3_final
# domain: [cluster]
# udma:
# commit: 62b10440
# domain: [soc]
# axi_slice:
# commit: master
# domain: [soc,cluster]
# If a *tag* or *commit* is referenced, the IP will be in a
# state of DETACHED HEAD. Before committing any additional
# work, make sure to checkout a branch.
#
# APB IPs
apb/apb_node:
commit: pulpinov1
apb/apb_event_unit:
commit: pulpinov1
apb/apb_fll_if:
commit: pulpinov1
apb/apb_gpio:
commit: pulpinov1
apb/apb_i2c:
commit: pulpinov1
apb/apb_pulpino:
commit: pulpinov1
apb/apb_spi_master:
commit: pulpinov1
apb/apb_timer:
commit: pulpinov1
apb/apb_uart:
commit: pulpinov1
apb/apb_uart_sv:
commit: pulpinov1
apb/apb2per:
commit: pulpinov1
# AXI IPs
axi/axi2apb:
commit: pulpinov1
axi/axi_mem_if_DP:
commit: pulpinov1
axi/axi_node:
commit: pulpinov1
axi/axi_slice:
commit: pulpinov1
axi/axi_slice_dc:
commit: pulpinov1
axi/axi_spi_master:
commit: pulpinov1
axi/axi_spi_slave:
commit: pulpinov1
axi/core2axi:
commit: pulpinov1
adv_dbg_if:
commit: pulpinov1
riscv:
commit: pulpinov1
zero-riscy:
commit: pulpinov1
fpu:
commit: pulpinov1
|
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
name: system-occamy
authors:
- <NAME> <<EMAIL>>
- <NAME> <<EMAIL>>
dependencies:
# axi_riscv_atomics: {path: ../../vendor/pulp_platform_axi_riscv_atomics}
snitch_read_only_cache: {path: ../../ip/snitch_read_only_cache}
snitch-cluster: {path: ../../ip/snitch_cluster}
spm-interface: {path: ../../ip/spm_interface}
cva6: {path: ../../vendor/openhwgroup_cva6}
apb_uart: {path: ../../vendor/pulp_platform_apb_uart}
lowrisc_rv_plic: {path: ../../vendor/lowrisc_opentitan/rv_plic}
lowrisc_gpio: {path: ../../vendor/lowrisc_opentitan/gpio}
lowrisc_spi_host: {path: ../../vendor/lowrisc_opentitan/spi_host}
lowrisc_i2c: {path: ../../vendor/lowrisc_opentitan/i2c}
test: {path: ../../ip/test}
apb: {path: ../../vendor/pulp_platform_apb}
timer: {path: ../../vendor/pulp_platform_apb_timer}
sources:
# Level 0:
- src/occamy_cluster_wrapper.sv
- src/occamy_soc_ctrl/occamy_soc_reg_pkg.sv
- src/occamy_soc_ctrl/occamy_soc_reg_top.sv
- src/occamy_quadrant_s1_ctrl/occamy_quadrant_s1_reg_pkg.sv
- src/occamy_quadrant_s1_ctrl/occamy_quadrant_s1_reg_top.sv
- src/rv_plic/rv_plic_reg_pkg.sv
- src/rv_plic/rv_plic_reg_top.sv
- src/rv_plic/rv_plic.sv
- src/clint/clint_reg_pkg.sv
- src/clint/clint_reg_top.sv
- src/clint/clint.sv
- src/reg_to_apb.sv
# Level 1:
- src/occamy_soc_ctrl/occamy_soc_ctrl.sv
- src/occamy_pkg.sv
# Level 2:
- src/occamy_quadrant_s1_ctrl.sv
# Level 3:
- src/occamy_cva6.sv
- src/occamy_quadrant_s1.sv
# Level 4:
- src/occamy_soc.sv
# Level 5:
- src/occamy_top.sv
# # Level 6:
- target: xilinx
files:
- src/occamy_xilinx.sv
- target: any(simulation, verilator)
files:
- test/uartdpi/uartdpi.sv
- test/testharness.sv
# - target: any(simulation, verilator)
# files:
# - test/tb_memory.sv
# - test/testharness.sv
# # Level 4:
# - target: test
# files:
# - test/tb_bin.sv
|
module: test_scm
sources:
- ../../../interfaces/common/dii_channel.sv
- ../../../blocks/regaccess/common/osd_regaccess.sv
- ../common/osd_scm.sv
toplevel: osd_scm
simulators:
- vcs
parameters:
SYSTEM_VENDOR_ID: 1
SYSTEM_DEVICE_ID: 1
NUM_MOD: 1
MAX_PKT_LEN: 12
|
<filename>models/AI-Model-Zoo/model-list/pt_FairMOT_mixed_640_480_0.5_36G_2.0/model.yaml
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: FairMOT Joint detection and Tracking
input size: 640*480
float ops: 36G
# Fixed: was "denoising", which contradicts the description above —
# FairMOT is a multi-object tracking model, not a denoiser.
# NOTE(review): confirm the exact task string against other Model Zoo
# entries (e.g. "person reid" at the reid model) for catalog consistency.
task: joint detection and tracking
framework: pytorch
prune: '0.5'
# Fixed: was 1.4, but the package name (pt_FairMOT_..._2.0) and every
# download link below (r2.0.0) belong to the 2.0 release.
version: 2.0
files:
- name: pt_FairMOT_mixed_640_480_0.5_36G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_FairMOT_mixed_640_480_0.5_36G_2.0.zip
checksum: 0639b0645d60d605cba11d8ccf1baa09
- name: FairMot_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=FairMot_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: 1515645a7aebafdd0ac15b242f37b033
- name: FairMot_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=FairMot_pt-vck190-r2.0.0.tar.gz
checksum: ef4ca9ee00fc955235f673b8f8e1291a
- name: FairMot_pt
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=FairMot_pt-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: a51d0232604aaf3bd837dd4912c31bc2
- name: FairMot_pt
type: xmodel
board: vck50008pe-DPUCVDX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=FairMot_pt-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz
checksum: 44090d010070e2d468c89bcb16e15466
- name: FairMot_pt
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=FairMot_pt-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: 1afc9a382a2e41bc2272a5355d8f4c41
- name: FairMot_pt
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=FairMot_pt-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: 66943cfd087fd3ea485cce227929a740
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
<gh_stars>1-10
hwpe-ctrl:
vlog_opts: [
+nowarnSVCHK,
]
incdirs: [
rtl,
]
files: [
rtl/hwpe_ctrl_package.sv,
rtl/hwpe_ctrl_interfaces.sv,
rtl/hwpe_ctrl_regfile.sv,
rtl/hwpe_ctrl_regfile_latch.sv,
rtl/hwpe_ctrl_regfile_latch_test_wrap.sv,
rtl/hwpe_ctrl_slave.sv,
rtl/hwpe_ctrl_seq_mult.sv,
rtl/hwpe_ctrl_uloop.sv,
]
tb_hwpe_ctrl:
targets: [
rtl
]
files: [
tb/tb_hwpe_ctrl_seq_mult.sv,
]
|
---
algorithm:
class: ParetoGPSimplified
population_size: 600
archive_size: 200
generations_per_cascade: 10
mutation_probability: 0.00
shorten_individual: true
tournament_size: 10
init:
method: ramped # grow or full or ramped
sensible_depth: 7
termination:
max_steps: 1000
on_individual: :stopping_condition
grammar:
class: Abnf::File
filename: sample/toy_regression/grammar.abnf
mapper:
class: DepthLocus
crossover:
class: CrossoverRipple
margin: 2 #1
step: 2
mutation:
class: MutationRipple
store:
class: Store
filename: ./toy_paretogp_simplified.store
report:
class: ToyReport
require: sample/toy_regression/toy_report.rb
individual:
class: ToyIndividualMultiObjective
require: sample/toy_regression/toy_individual.rb
shorten_chromozome: false
|
axi_slice_dc:
files: [
src/axi_slice_dc_master.sv,
src/axi_slice_dc_slave.sv,
src/dc_data_buffer.sv,
src/dc_full_detector.v,
src/dc_synchronizer.v,
src/dc_token_ring_fifo_din.v,
src/dc_token_ring_fifo_dout.v,
src/dc_token_ring.v,
src/axi_slice_dc_master_wrap.sv,
src/axi_slice_dc_slave_wrap.sv,
src/axi_cdc.sv,
]
|
<filename>firmware/releases.yaml
GitBase: ..
TopRoguePackage: lcls2_epix_hr_pcie
RoguePackages:
- submodules/axi-pcie-core/python
- submodules/clink-gateway-fw-lib/python
- submodules/l2si-core/python
- submodules/lcls2-pgp-fw-lib/python
- submodules/lcls-timing-core/python
- submodules/surf/python
- python
RogueConfig:
- ../software/config
RogueScripts:
# - ../software/scripts/devGui
Targets:
################################################################################
Lcls2EpixHrSlacPgpCardG4Pgp2b:
ImageDir: targets/SlacPgpCardG4/Lcls2EpixHrSlacPgpCardG4Pgp2b/images
Extensions:
- mcs
Lcls2EpixHrSlacPgpCardG4Pgp4_6Gbps:
ImageDir: targets/SlacPgpCardG4/Lcls2EpixHrSlacPgpCardG4Pgp4_6Gbps/images
Extensions:
- mcs
Lcls2EpixHrSlacPgpCardG4Pgp4_10Gbps:
ImageDir: targets/SlacPgpCardG4/Lcls2EpixHrSlacPgpCardG4Pgp4_10Gbps/images
Extensions:
- mcs
################################################################################
Lcls2EpixHrXilinxKcu1500Pgp2b:
ImageDir: targets/XilinxKcu1500/Lcls2EpixHrXilinxKcu1500Pgp2b/images
Extensions:
- mcs
Lcls2EpixHrXilinxKcu1500Pgp4_6Gbps:
ImageDir: targets/XilinxKcu1500/Lcls2EpixHrXilinxKcu1500Pgp4_6Gbps/images
Extensions:
- mcs
Lcls2EpixHrXilinxKcu1500Pgp4_10Gbps:
ImageDir: targets/XilinxKcu1500/Lcls2EpixHrXilinxKcu1500Pgp4_10Gbps/images
Extensions:
- mcs
################################################################################
Releases:
lcls2_epix_hr_pcie:
Primary: True
Targets:
#######################################
# - Lcls2EpixHrSlacPgpCardG4Pgp2b
- Lcls2EpixHrSlacPgpCardG4Pgp4_6Gbps
# - Lcls2EpixHrSlacPgpCardG4Pgp4_10Gbps
#######################################
# - Lcls2EpixHrXilinxKcu1500Pgp2b
- Lcls2EpixHrXilinxKcu1500Pgp4_6Gbps
# - Lcls2EpixHrXilinxKcu1500Pgp4_10Gbps
#######################################
Types:
- Rogue
|
<reponame>zephyr-cosim/zephyr-cosim
{% set filename = "{{name}}_64.yaml" %}
identifier: {{name}}_64
name: Native 64-bit POSIX port
type: native
arch: posix
ram: 65536
flash: 65536
toolchain:
- host
- llvm
supported:
- eeprom
- netif:eth
- usb_device
- adc
|
package:
name: riscv
dependencies:
fpnew: { git: "https://github.com/pulp-platform/fpnew.git", version: 0.6.1 }
common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.16.4 }
tech_cells_generic: { git: "https://github.com/pulp-platform/tech_cells_generic.git", version: 0.1.1 }
sources:
include_dirs:
- rtl/include
files:
- rtl/include/cv32e40p_apu_core_pkg.sv
- rtl/include/cv32e40p_pkg.sv
- rtl/cv32e40p_alu.sv
- rtl/cv32e40p_alu_div.sv
- rtl/cv32e40p_aligner.sv
- rtl/cv32e40p_compressed_decoder.sv
- rtl/cv32e40p_controller.sv
- rtl/cv32e40p_cs_registers.sv
- rtl/cv32e40p_decoder.sv
- rtl/cv32e40p_int_controller.sv
- rtl/cv32e40p_ex_stage.sv
- rtl/cv32e40p_hwloop_controller.sv
- rtl/cv32e40p_hwloop_regs.sv
- rtl/cv32e40p_id_stage.sv
- rtl/cv32e40p_if_stage.sv
- rtl/cv32e40p_load_store_unit.sv
- rtl/cv32e40p_mult.sv
- rtl/cv32e40p_prefetch_buffer.sv
- rtl/cv32e40p_obi_interface.sv
- rtl/cv32e40p_core.sv
- rtl/cv32e40p_apu_disp.sv
- rtl/cv32e40p_fetch_fifo.sv
- rtl/cv32e40p_popcnt.sv
- rtl/cv32e40p_ff_one.sv
- rtl/cv32e40p_sleep_unit.sv
- target: asic
files:
- rtl/cv32e40p_register_file_latch.sv
- target: not(asic)
files:
- rtl/cv32e40p_register_file_ff.sv
- target: rtl
files:
- bhv/cv32e40p_sim_clock_gate.sv
|
<filename>infrastructure/kctf/base/k8s/deployment/autoscaling.yaml
apiVersion: "autoscaling/v1"
kind: "HorizontalPodAutoscaler"
metadata:
  name: "chal"
spec:
  scaleTargetRef:
    kind: "Deployment"
    name: "chal"
    # Fixed: was "apps/v1beta1", which was removed in Kubernetes 1.16.
    # Deployments have been served from apps/v1 since 1.9.
    apiVersion: "apps/v1"
  # min == max pins the challenge at exactly one replica; the CPU target
  # below is therefore inert until maxReplicas is raised.
  minReplicas: 1
  maxReplicas: 1
  targetCPUUtilizationPercentage: 50
|
<reponame>f110/wing
apiVersion: harbor.f110.dev/v1alpha1
kind: HarborProject
metadata:
name: test
spec:
public: true
|
<reponame>SubjeBilisim/anasymod<gh_stars>10-100
analog_probes:
v_in_probe:
abspath: 'tb_i.v_in'
range: 10.0
v_out_probe:
abspath: 'tb_i.v_out'
range: 10.0
analog_ctrl_inputs:
v_in:
abspath: 'tb_i.v_in'
range: 10
init_value: 0.0
analog_ctrl_outputs:
v_out:
abspath: 'tb_i.v_out'
range: 10
|
<filename>documentation/metadata/vchip.yaml
---
title: Unnamed DMA Chip
author:
- Felix "xq" Queißner
date: May 07, 2020
abstract: |
<p>The RAM Blitter is a DMA unit that is developed for general purpose DMA transfers, as well as framebuffer modifications. It supports basic linear memory transfers as well as rectangle copies with image manipulation.</p>
<p>The RAM blitter also has a modes for drawing 2d vector graphics and a filling operation that allows color transfers.</p>
|
<gh_stars>10-100
# Device description for the Keithley 6517A Electrometer.
# set_ function expect a parameter, get_ function return a parameter.
# Just the very basic commands are implemented here.
identifier : KEITHLEY INSTRUMENTS INC.,MODEL 6517A
remote_enable : REN
on : OUTP ON
off : OUTP OFF
#get_current : MEAS:CURR?
get_current : READ?
set_current_limit : SENS:CURR:PROT
set_voltage : SOUR:VOLT
get_voltage : MEAS:VOLT?
trigger : TRIG:GET
set_current_range : CURR:RANG
|
<reponame>diorga/snitch<filename>hw/vendor/pulp_platform_common_cells/.travis.yml
language: cpp
cache:
  apt: true
  # Fixed: Travis expects "directories" to be a YAML list; the original
  # bare scalar ($VERILATOR_ROOT on its own line) is parsed as a plain
  # string value and the directory is silently not cached.
  directories:
    - $VERILATOR_ROOT
  timeout: 1000
env:
global:
- VERILATOR_ROOT="/home/travis/verilator-4.014/"
# required packages to install
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- gcc-7
- g++-7
- gperf
- autoconf
- automake
- autotools-dev
- libmpc-dev
- libmpfr-dev
- libgmp-dev
- gawk
- build-essential
- bison
- flex
- texinfo
- python-pexpect
- libusb-1.0-0-dev
- default-jdk
- zlib1g-dev
- valgrind
before_install:
# setup dependent paths
- export PATH=$VERILATOR_ROOT/bin:$PATH
- export C_INCLUDE_PATH=$VERILATOR_ROOT/include
- export CPLUS_INCLUDE_PATH=$VERILATOR_ROOT/include
- mkdir -p tmp
stages:
- compile
- test
jobs:
include:
- stage: compile
name: compile verilator
script:
- ci/install-verilator.sh
- stage: test
name: test encode
script:
- make
- ./obj_dir/Vecc_encode > /dev/zero
- stage: test
name: test decode
script:
- make
- ./obj_dir/Vecc_decode > /dev/zero
|
top:
cover_percentage: 40.28
coverage: 29
size: 72
type: <class 'cocotb_coverage.coverage.CoverItem'>
top.covercross:
cover_percentage: 30.0
coverage: 3
size: 10
type: <class 'cocotb_coverage.coverage.CoverItem'>
top.covercross.cr0:
at_least: 1
bins:_hits:
((0, 5), 0): 1
((0, 5), 1): 1
((0, 5), 2): 0
((0, 5), 3): 0
((0, 5), 4): 0
((5, 10), 0): 0
((5, 10), 1): 0
((5, 10), 2): 0
((5, 10), 3): 0
((5, 10), 4): 1
cover_percentage: 30.0
coverage: 3
size: 10
type: <class 'cocotb_coverage.coverage.CoverCross'>
weight: 1
top.coveritem:
cover_percentage: 41.94
coverage: 26
size: 62
type: <class 'cocotb_coverage.coverage.CoverItem'>
top.coveritem.config:
cover_percentage: 0.0
coverage: 0
size: 2
type: <class 'cocotb_coverage.coverage.CoverItem'>
top.coveritem.config.cp0:
at_least: 5
bins:_hits:
(0, 5): 4
(5, 10): 3
cover_percentage: 0.0
coverage: 0
size: 2
type: <class 'cocotb_coverage.coverage.CoverPoint'>
weight: 1
top.coveritem.signal:
cover_percentage: 43.33
coverage: 26
size: 60
type: <class 'cocotb_coverage.coverage.CoverItem'>
top.coveritem.signal.cp1:
at_least: 1
bins:_hits:
0: 1
1: 0
2: 0
3: 0
4: 1
cover_percentage: 40.0
coverage: 20
size: 50
type: <class 'cocotb_coverage.coverage.CoverPoint'>
weight: 10
top.coveritem.signal.cp2:
at_least: 1
bins:_hits:
0: 1
1: 0
2: 0
3: 0
4: 0
5: 1
6: 1
7: 1
8: 1
9: 1
cover_percentage: 60.0
coverage: 6
size: 10
type: <class 'cocotb_coverage.coverage.CoverPoint'>
weight: 1
|
# Private CI trigger. Used to run tooling that can't currently be shared
# publicly.
trigger:
batch: true
branches:
include:
- '*'
tags:
include:
- "*"
pr:
branches:
include:
- '*'
# The runner used for private CI enforces the use of the template below. All
# build steps need to be placed into the template.
resources:
repositories:
- repository: cocotb-private-ci
type: github
endpoint: cocotb
name: cocotb/cocotb-private-ci
extends:
template: jobs.yml@cocotb-private-ci
|
site_name: Croyde RISC-V
theme: readthedocs
nav:
- Home: index.md
- Project Organisation: project-organisation.md
- Requirements: functional-requirements.md
- Instruction Listing: instruction-listing.md
- Pipeline: pipeline.md
- Core Complex: ccx.md
- Memory Interfaces: memory-interface.md
- Flow - Synthesis: flows-synthesis.md
- Flow - Unit Tests: flows-unit-tests.md
- Flow - riscv-formal: flows-riscv-formal.md
- Flow - Embench: embench.md
- Flow - rvkrypto: flows-rvkrypto.md
- Energy Efficiency : energy-efficiency.md
- Todo: todo.md
|
name: Bump version
on:
workflow_dispatch:
schedule:
- cron: '0 0 * * *'
jobs:
bump-version:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Take last commit
id: log
run: echo "::set-output name=message::$(git log --no-merges -1 --oneline)"
- name: Take repository
id: repo
run: echo "::set-output name=message::$GITHUB_REPOSITORY"
- name: Bump version
if: "!contains(steps.log.outputs.message, 'Bump version') && contains(steps.repo.outputs.message, 'YosysHQ/yosys')"
run: |
make bumpversion
git config --local user.email "41898282+github-actions[bot]<EMAIL>"
git config --local user.name "github-actions[bot]"
git add Makefile
git commit -m "Bump version"
- name: Push changes # push the output folder to your repo
if: "!contains(steps.log.outputs.message, 'Bump version') && contains(steps.repo.outputs.message, 'YosysHQ/yosys')"
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
|
<reponame>ChipFlow/mpw4<gh_stars>1-10
name: Flow CI test
on: [push, pull_request]
jobs:
Build-yosys:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: recursive
- name: Install
run: |
sudo apt-get update
sudo apt-get install git make cmake libboost-all-dev python3-dev libeigen3-dev tcl-dev clang bison flex swig
- name: ccache
uses: hendrikmuhs/ccache-action@v1
- name: Get yosys
run: |
git clone https://github.com/YosysHQ/yosys.git
cd yosys
echo "YOSYS_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
- name: Cache yosys installation
uses: actions/cache@v2
id: cache-yosys
with:
path: .yosys
key: cache-yosys-${{ env.YOSYS_SHA }}
- name: Build yosys
run: |
export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH"
source ./.github/ci/build_deps.sh
build_yosys
if: steps.cache-yosys.outputs.cache-hit != 'true'
Build-coriolis:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: recursive
- name: Install
run: |
sudo apt-get update
sudo apt-get install git make cmake libboost-all-dev python3-dev libeigen3-dev tcl-dev clang bison flex swig qt5-default python3-pyqt5 libqt5svg5-dev qttools5-dev rapidjson-dev libqwt-qt5-dev
- name: ccache
uses: hendrikmuhs/ccache-action@v1
- name: Get coriolis
run: |
cd $HOME
mkdir -p ./coriolis-2.x/src/support
cd ./coriolis-2.x/src/support
git clone https://github.com/miloyip/rapidjson
cd ..
git clone https://gitlab.lip6.fr/vlsi-eda/coriolis
cd coriolis
echo "CORIOLIS_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
- name: Cache coriolis installation
uses: actions/cache@v2
id: cache-coriolis
with:
path: /home/runner/coriolis-2.x/Linux.x86_64/Release.Shared/install
key: cache-coriolis-v2-${{ env.CORIOLIS_SHA }}
- name: Build coriolis
run: |
export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH"
source ./.github/ci/build_deps.sh
build_coriolis
if: steps.cache-coriolis.outputs.cache-hit != 'true'
Build-soc:
runs-on: ubuntu-latest
needs: [Build-yosys, Build-coriolis]
steps:
- uses: actions/checkout@v2
with:
submodules: recursive
- name: Install
run: |
sudo apt-get update
sudo apt-get install git make cmake libboost-all-dev python3-dev libeigen3-dev tcl-dev clang bison flex swig qt5-default python3-pyqt5 pyqt5-dev-tools libqt5svg5-dev qttools5-dev rapidjson-dev libqwt-qt5-dev tree
- name: ccache
uses: hendrikmuhs/ccache-action@v1
- name: Get yosys
run: |
git clone https://github.com/YosysHQ/yosys.git
cd yosys
echo "YOSYS_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
- name: Cache yosys installation
uses: actions/cache@v2
id: cache-yosys
with:
path: .yosys
key: cache-yosys-${{ env.YOSYS_SHA }}
- name: Build yosys
run: |
export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH"
source ./.github/ci/build_deps.sh
build_yosys
if: steps.cache-yosys.outputs.cache-hit != 'true'
- name: Get coriolis
run: |
cd $HOME
mkdir -p ./coriolis-2.x/src/support
cd ./coriolis-2.x/src/support
git clone https://github.com/miloyip/rapidjson
cd ..
git clone https://gitlab.lip6.fr/vlsi-eda/coriolis
cd coriolis
echo "CORIOLIS_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
- name: Cache coriolis installation
uses: actions/cache@v2
id: cache-coriolis
with:
path: /home/runner/coriolis-2.x/Linux.x86_64/Release.Shared/install
key: cache-coriolis-v2-${{ env.CORIOLIS_SHA }}
- name: Build coriolis
run: |
export PATH="/usr/lib/ccache:/usr/local/opt/ccache/libexec:$PATH"
source ./.github/ci/build_deps.sh
build_coriolis
if: steps.cache-coriolis.outputs.cache-hit != 'true'
- name: Build SoC
run: |
source ./.github/ci/build_soc.sh
install_pydeps && do_build
|
<reponame>slaclab/epix-hr-10k-2m
ePixHr10kT:
enable: True
ForceWrite: False
InitAfterConfig: False
EpixHR:
enable: True
TriggerRegisters:
enable: True
RunTriggerEnable: False
RunTriggerDelay: 0
DaqTriggerEnable: False
DaqTriggerDelay: 0
AutoRunEn: True
AutoDaqEn: True
AutoTrigPeriod: 1000000
PgpTrigEn: False
|
<filename>.github/workflows/docker-rpm.yml
name: Docker Build and Review RPM
on:
push:
branches-ignore:
- '**'
paths:
- opae.spec
- scripts/docker_rpm.sh
- .github/workflows/docker-rpm.yml
jobs:
build_docker_image:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build a Docker image and RPM file
run: ${{ github.workspace }}/scripts/docker_rpm.sh
- name: Archive Results
uses: actions/upload-artifact@v1
with:
name: rpmbuild
path: rpmbuild
|
<filename>hw/vendor/pulp_platform_axi_riscv_atomics/src_files.yml
axi_riscv_atomics:
files: [
src/axi_res_tbl.sv,
src/axi_riscv_amos_alu.sv,
src/axi_riscv_amos.sv,
src/axi_riscv_amos_wrap.sv,
src/axi_riscv_lrsc.sv,
src/axi_riscv_lrsc_wrap.sv,
src/axi_riscv_atomics.sv,
src/axi_riscv_atomics_wrap.sv,
]
|
<gh_stars>1-10
---
project:
description: "RISC-V SoC."
foundry: "SkyWater"
git_url: "https://github.com/kasirgalabs/kasirga-k0.git"
organization: "Kasirga Labs"
organization_url: "http://www.kasirgalabs.com/"
owner: "<NAME>"
process: "SKY130"
project_name: "Kasirga K0"
project_id: "00000000"
tags:
- "in-order processor"
- "soc"
- "riscv"
category: "Test Harness"
top_level_netlist: "caravel/verilog/gl/caravel.v"
user_level_netlist: "verilog/gl/user_project_wrapper.v"
version: "v1"
cover_image: "docs/source/_static/caravel_harness.png"
|
<gh_stars>10-100
site_name: Mill
repo_url: https://github.com/CircuitCoder/mill
repo_name: CircuitCoder/mill
edit_uri: edit/master/doc/docs/
copyright: Copyright © Mill developers, released under CC BY-NC-SA 4.0
plugins:
- search:
- git-revision-date-localized:
type: iso_datetime
fallback_to_build_date: true
locale: zh
time_zone: Asia/Shanghai
theme:
name: material
language: zh
palette:
primary: blue grey
accent: orange
features:
- navigation.tabs
- navigation.instant
icon:
repo: fontawesome/brands/github
custom_dir: overrides
nav:
- 主页: index.md
- 示例 CPU 设计:
- 总览: arch/overview.md
- 实现细节: arch/impl.md
- 扩展开发指南: arch/ext.md
- 测试框架:
- Recipes: usage/recipes.md
- riscv-tests: usage/riscv-tests.md
- GTKWave / FST: usage/gtkwave.md
- 开发日志:
- 主页: timeline/index.md
- "2020-10-03": timeline/2020-10-03.md
- "2020-10-18": timeline/2020-10-18.md
- "2020-10-21": timeline/2020-10-21.md
- "2020-10-25": timeline/2020-10-25.md
- "2020-10-26": timeline/2020-10-26.md
- "2020-10-30": timeline/2020-10-30.md
extra_css:
- styles/index.css
markdown_extensions:
- mdx_truly_sane_lists
- pymdownx.highlight:
linenums: true
- pymdownx.superfences
|
<filename>site/docs/cloudbuild-deploy-docs.yaml
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
steps:
- name: 'gcr.io/active-premise-257318/builder'
args: ['./util/build_docs.py']
- name: 'gcr.io/cloud-builders/gsutil'
args: ['-m', 'rsync', '-r', '-d','build/docs/', 'gs://active-premise-257318']
|
<reponame>IanBoyanZhang/kratos
name: Google Test
on: [push]
jobs:
build:
runs-on: ubuntu-latest
if: "!contains(github.event.head_commit.message, 'skip ci')"
steps:
- uses: actions/checkout@v2
- name: Checkout submodules
shell: bash
run: |
auth_header="$(git config --local --get http.https://github.com/.extraheader)"
git submodule sync --recursive
git -c "http.extraheader=$auth_header" -c protocol.version=2 submodule update --init --force --recursive --depth=1
- name: Run ctest with valgrind
shell: bash
env:
BUILD_WHEEL: false
OS: linux
run: |
source ./scripts/ci.sh
|
<filename>modules/dem_uart/test/test_dem_uart_wb.manifest.yaml
module: test_dem_uart_wb
sources:
- ../../../interfaces/common/dii_channel.sv
- ../../../blocks/regaccess/common/osd_regaccess.sv
- ../../../blocks/regaccess/common/osd_regaccess_demux.sv
- ../../../blocks/regaccess/common/osd_regaccess_layer.sv
- ../../../interconnect/common/ring_router_mux.sv
- ../common/osd_dem_uart.sv
- ../common/osd_dem_uart_16550.sv
- ../common/osd_dem_uart_wb.sv
toplevel: osd_dem_uart_wb
simulators:
  # Fixed: was "-vcs" (no space after "-"), which YAML parses as the plain
  # scalar string "-vcs" rather than a one-element list. The sibling
  # manifest (test_scm) uses the correct "- vcs" form.
  - vcs
|
<gh_stars>10-100
description: >-
Final version of data and analysis scripts for sigcomm submission.
Formatting and analysis differences in analysis.ipynb.
Telemetry has also been disabled in the experiment as a whole.
experiment: e2e
repositories:
P4Boosters: 317c7f28
TofinoP4Boosters: 1d83cb15853
files:
data: tc7_output/
config.yml: execution/cfgs/tc7_e2e_iperf_and_mcd.yml
analysis.ipynb: analysis/e2eAnalysis.ipynb
run_script.sh: execution/run_tc7.sh
documentation.md: e2e.md
e2e.pdf: analysis/e2e.pdf
Encoder.tar.gz: ../bitstreams/RSEEncoder1Port.tar.gz
Decoder.tar.gz: ../bitstreams/RSEDecoder1Port.tar.gz
Memcached.tar.gz: ../bitstreams/Memcached.tar.gz
|
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
name: snitch_ssr
authors:
- <NAME> <<EMAIL>>
- <NAME> <<EMAIL>>
dependencies:
common_cells: {path: ../../vendor/pulp_platform_common_cells}
# Local dependencies.
tcdm_interface: {path: ../../ip/tcdm_interface}
snitch: {path: ../../ip/snitch}
sources:
# Level 0:
- src/snitch_ssr_addr_gen.sv
- src/snitch_ssr_switch.sv
# Level 1:
- src/snitch_ssr.sv
# Level 2:
- src/snitch_ssr_streamer.sv
|
<gh_stars>10-100
image: Visual Studio 2019
clone_depth: 1
platform:
- Win32
configuration:
- Release
environment:
matrix:
- PYTHON: "36"
- PYTHON: "37"
- PYTHON: "38"
- PYTHON: "39"
# scripts that are called at very beginning, before repo cloning
init:
# if it's not tagged, only run py39
- ps: If (($env:APPVEYOR_REPO_TAG -notmatch "true" ) -and ($env:PYTHON -ne "39")) {Exit-AppveyorBuild}
- cmd: cmake --version
install:
- git submodule update --init --recursive
- SET PATH=C:\\Python%PYTHON%-x64;C:\\Python%PYTHON%-x64\\Scripts;%PATH%
- python --version
- python -m pip install wheel cmake==3.15.3 pytest
- SET MAKE_PROGRAM=C:\\msys64\\usr\\bin\\make
- echo %MAKE_PROGRAM%
build_script:
# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1'))
- python setup.py bdist_wheel
# install the wheel
- pip install astor
- pip install --no-index --find-links=dist kratos
test_script:
- pytest -v tests/
artifacts:
- path: dist\*
name: pypiartifacts
deploy_script:
- pip install twine
# copied from here
- # https://github.com/AndrewAnnex/SpiceyPy/blob/master/appveyor.yml
- echo "Starting Artifact Deployment"
# populate pypirc file for twine
- echo [distutils] > %USERPROFILE%\\.pypirc
- echo index-servers = >> %USERPROFILE%\\.pypirc
- echo pypi >> %USERPROFILE%\\.pypirc
- echo [pypi] >> %USERPROFILE%\\.pypirc
- echo username=keyi >> %USERPROFILE%\\.pypirc
- echo password=%<PASSWORD>% >> %USERPROFILE%\\.pypirc
# upload to pypi for windows
- set HOME=%USERPROFILE%
- ps: If ($env:APPVEYOR_REPO_TAG -imatch "true" ) { Invoke-Expression "twine upload --skip-existing dist/*.whl" } Else { write-output "Not on a tag on master, won't deploy to pypi"}
- echo "Finished Artifact Deployment"
|
defaults:
- <EMAIL>:
- "1600"
- "3200"
- "6400"
- "12800"
- <EMAIL>:
- "7010"
- "7020"
- "at200"
- <EMAIL>:
- "data_1_4_1"
- "data_1_4_2"
- "data_in_redir"
- "dotProduct_1"
- "dotProduct_2"
- "Sigmoid_axi"
- <EMAIL>:
- "update_knn1"
- "update_knn2"
- "update_knn3"
- "update_knn4"
- "update_knn5"
- "update_knn6"
- <EMAIL>:
- "coloringFB_bot_m"
- "coloringFB_top_m"
- "data_redir_m"
- "rasterization2_m"
- "zculling_bot"
- "zculling_top"
- <EMAIL>:
- "coloringFB_bot_m"
- "data_redir_m"
- "rasterization2_m"
- "zculling_top"
- <EMAIL>:
- "matmul"
- <EMAIL>:
- "top"
vpr_options:
bb_factor: 10
acc_fac: 0.7
astar_fac: 1.8
initial_pres_fac: 2.828
pres_fac_mult: 1.2
# max_criticality: 0.999
# target_ext_pin_util:
# input: 1.0
# output: 1.0
# place_algorithm: criticality_timing
load_vivado: "module load xilinx/2020.2"
num_trials: 50
rrgraph_install_dir: /media/lilbirb/scratch/code/doblink/build/share/symbiflow/arch
|
<filename>ci/vivado-checks.yml<gh_stars>0
quick_check:enclustra_ax3_pm3_a35__2018.3:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2018.3"
PROJ: enclustra_ax3_pm3_a35
quick_check:enclustra_ax3_pm3_a35__2019.2:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2019.2"
PROJ: enclustra_ax3_pm3_a35
quick_check:enclustra_ax3_pm3_a50__2018.3:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2018.3"
PROJ: enclustra_ax3_pm3_a50
quick_check:enclustra_ax3_pm3_a50__2019.2:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2019.2"
PROJ: enclustra_ax3_pm3_a50
quick_check:kc705_basex__2018.3:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2018.3"
PROJ: kc705_basex
quick_check:kc705_basex__2019.2:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2019.2"
PROJ: kc705_basex
quick_check:kc705_gmii__2018.3:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2018.3"
PROJ: kc705_gmii
quick_check:kc705_gmii__2019.2:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2019.2"
PROJ: kc705_gmii
quick_check:kcu105_basex__2018.3:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2018.3"
PROJ: kcu105_basex
quick_check:kcu105_basex__2019.2:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2019.2"
PROJ: kcu105_basex
quick_check:zcu102_basex__2018.3:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2018.3"
PROJ: zcu102_basex
quick_check:zcu102_basex__2019.2:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2019.2"
PROJ: zcu102_basex
quick_check:zcu102_c2c_loopback__2019.1:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2019.1"
PROJ: zcu102_c2c_loopback
quick_check:k800__2018.3:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2018.3"
PROJ: k800
quick_check:k800__2019.2:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2019.2"
PROJ: k800
quick_check:vcu118_pcie__2018.3:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2018.3"
PROJ: vcu118_pcie
quick_check:vcu118_pcie__2019.2:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2019.2"
PROJ: vcu118_pcie
quick_check:vcu118_sgmii__2018.3:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2018.3"
PROJ: vcu118_sgmii
quick_check:vcu118_sgmii__2019.2:
extends: .template_vivado_quick_check
variables:
VIVADO_VERSION: "2019.2"
PROJ: vcu118_sgmii
check-depfiles:
extends: .template_base
tags:
- docker
- xilinx-tools
stage: quick_checks
variables:
VIVADO_VERSION: "2019.2"
script:
- ipbb init work_area
- cd work_area
- ln -s ${CI_PROJECT_DIR} src/ipbus-firmware
- ./src/ipbus-firmware/tests/ci/check-dep-files.sh
artifacts:
when: on_success
paths:
- work_area
expire_in: 1 day
|
package:
name: lowrisc_gpio
description: "lowRISC GPIO"
authors: ["lowRISC Contributors"]
dependencies:
lowrisc_prim: {path: ../prim}
sources:
# Level 0
- rtl/gpio_reg_pkg.sv
# Level 1
- rtl/gpio_reg_top.sv
- rtl/gpio.sv
|
language: rust
# cache: cargo
rust:
- stable
- beta
- nightly
# minimum supported version
- "1.43.0"
jobs:
allow_failures:
- rust: nightly
fast_finish: true
before_script:
- rustup component add rustfmt
script:
- "[ $TRAVIS_RUST_VERSION != stable ] || cargo fmt -- --check"
- cargo build
- cargo test --all
- test/run.sh
- python3 scripts/test.py --debug --verbose
|
<reponame>SubjeBilisim/anasymod
PROJECT:
dt: 0.1e-6
board_name: PYNQ_Z1
plugins: ['msdsl']
emu_clk_freq: 10e6
FPGA_TARGET:
fpga:
tstop: 10e-6
|
# TODO: Extend this to add job(s) for simulations, can be run in parallel with risc-v formal.
dist: focal
sudo: false
language: generic
cache:
directories:
- /home/travis/build/$TRAVIS_PULL_REQUEST_SLUG/formal/env
- /home/travis/build/$TRAVIS_REPO_SLUG/formal/env
before_install:
install:
# If env is not provided by the cache and marked passed, remove any cached env results and build it.
- ( cd formal && if [[ ! -e env/PASSED ]]; then rm -rf env/* && ./make_env.sh; fi )
# Record the commit ID of the latest yosys.
- git ls-remote --heads https://github.com/cliffordwolf/yosys.git refs/heads/master > yosys_latest_commit_id.txt && sed -i 's/\s.*//' yosys_latest_commit_id.txt
# Record the commit ID of the latest SymbiYosys.
- git ls-remote --heads https://github.com/cliffordwolf/SymbiYosys.git refs/heads/master > SymbiYosys_latest_commit_id.txt && sed -i 's/\s.*//' SymbiYosys_latest_commit_id.txt
# Install the Sandpiper-SaaS client
- pip3 install sandpiper-saas --user
before_script:
- cd formal
- if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then export BUILD_PATH=$TRAVIS_PULL_REQUEST_SLUG; else export BUILD_PATH=$TRAVIS_REPO_SLUG; fi
# TODO : riscv-formal for 6 stage implementation
jobs :
include :
- stage : "Check all 3 configs with risc-v formal" # Run riscv-formal for WARP-V.
name : "1 Stage - riscv-formal"
script:
- sed -i "/-stage/c\ m4_define([\'M4_STANDARD_CONFIG\'], [\'1-stage\'])" /home/travis/build/$BUILD_PATH/formal/warp-v_formal.tlv
- sed -i "/insn /c\insn 20" /home/travis/build/$BUILD_PATH/formal/checks.cfg
- PATH=/home/travis/build/$BUILD_PATH/formal/env/bin:$PATH bash -c 'make verif';
- script:
- sed -i "/-stage/c\ m4_define([\'M4_STANDARD_CONFIG\'], [\'4-stage\'])" /home/travis/build/$BUILD_PATH/formal/warp-v_formal.tlv
- sed -i "/insn /c\insn 29" /home/travis/build/$BUILD_PATH/formal/checks.cfg
- PATH=/home/travis/build/$BUILD_PATH/formal/env/bin:$PATH bash -c 'make verif';
name : "4 Stage - riscv-formal"
# the structure for the YAML format is non-standard in some way, but Travis interprets it correctly
# with both the scripts being run parallel under same stage (in Travis terminology)
after_success:
after_failure:
# Upload files for debug.
- echo "Uploading up to 4 failure traces for debug" && for FILE in `ls /home/travis/build/$BUILD_PATH/formal/checks/*/FAIL | head -n 4`; do curl --upload-file `echo $FILE | sed s/FAIL$//`engine_0/trace.vcd https://transfer.sh/`echo $FILE | sed 's/^.*\/\([^\/]*\)\/FAIL$/\1/'`_trace.vcd && echo; done
after_script:
# Report a message if we didn't use the latest commit of yosys.
- if ! cmp -s yosys_latest_commit_id.txt env/yosys_commit_id.txt; then echo '******** Using the following cached yosys (https://github.com/cliffordwolf/yosys.git) commit ID which is not the latest. Consider clearing Travis cache. **********' && cat env/yosys_commit_id.txt && echo '**********'; fi
# Report a message if we didn't use the latest commit of SymbiYosys.
- if ! cmp -s SymbiYosys_latest_commit_id.txt env/SymbiYosys_commit_id.txt; then echo '******** Using the following cached SymbiYosys (https://github.com/cliffordwolf/SymbiYosys) commit ID which is not the latest. Consider clearing Travis cache. **********' && cat env/SymbiYosys_commit_id.txt && echo '**********'; fi
|
apiVersion: kctf.dev/v1
kind: Challenge
metadata:
name: xss-bot
spec:
deployed: true
powDifficultySeconds: 0
network:
public: false
healthcheck:
# TIP: disable the healthcheck during development
enabled: true
# You can allow the bot to connect to other challenges internally.
# This can be useful during testing so that you don't have to make your
# challenge public.
# The challenge will be reachable at $name.default.svc.cluster.local or
# simply at $name with the default k8s search list.
#allowConnectTo:
# - otherchallenge
horizontalPodAutoscalerSpec:
maxReplicas: 10
minReplicas: 2
targetCPUUtilizationPercentage: 80
podTemplate:
template:
spec:
containers:
- name: 'challenge'
resources:
requests:
memory: "2048Mi"
cpu: "1000m"
|
<reponame>e7p/hal
pull_request_rules:
- name: automatic merge on CI success and review
conditions:
- status-success=ci/gitlab/gitlab.com
- "#approved-reviews-by>=1"
- base=master
- label!="work in progress"
actions:
merge:
method: merge
- name: Delete branch after merge
actions:
delete_head_branch: {}
conditions:
- merged
|
name: aes
clock_port: clk
verilog:
- aes.v
- byte_mixcolum.v
- keysched.v
- mixcolum.v
- sbox.v
- subbytes.v
- timescale.v
- word_mixcolum.v
|
<reponame>inmcm/Simon_Speck_Ciphers
language: python
python:
- "2.6"
- "2.7"
- "3.5"
- "3.6"
- "3.7-dev"
branches:
only:
- master
- inmcm/python_packaging
before_install:
- cd Python/simonspeckciphers/
- pip install setuptools_scm # required for py2.6
install:
- python setup.py install
script:
- pytest -vvv
|
<gh_stars>1-10
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: inception-v2 classifier on ImageNet.
input size: 224*224
float ops: 4G
task: classification
framework: caffe
prune: 'no'
version: 2.0
files:
- name: cf_inceptionv2_imagenet_224_224_4G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=cf_inceptionv2_imagenet_224_224_4G_2.0.zip
checksum: 43dcb6ca59325a5867704a40fb87ce6f
- name: inception_v2
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_v2-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: aa6220cc9425fa739ea015eb13a8cd5e
- name: inception_v2
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_v2-vck190-r2.0.0.tar.gz
checksum: 56ac9651f7880acada8413ab2cc8473c
- name: inception_v2
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_v2-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: 0223c2b554b37b3d56a29d6c85132ed1
- name: inception_v2
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_v2-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: ae1c2dced17093a2d8baf1abe1bf3493
- name: inception_v2
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_v2-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: a685171901fe05c7eeda429f4637377a
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
name: CI/VHDLTest
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: ghdl/setup-ghdl-ci@nightly
with:
backend: mcode
- name: Setup Python
uses: actions/setup-python@v2
- name: Install VHDLTest
run: pip install VHDLTest
- name: VHDLTest Utility
run: python -m VHDLTest --config test.yaml
|
site_name: PsPIN
repo_url: https://github.com/spcl/pspin
theme:
name: material
nav:
- Home: 'index.md'
- Getting started:
- Docker image: 'docker.md'
- Manual installation: 'compile.md'
- Running examples: 'examples.md'
- Testing your own handlers:
- Minimal handler: 'handlers_minimal.md'
- sPIN API: 'api.md'
- Simulation driver: 'sim_driver.md'
|
<reponame>hito0512/Vitis-AI
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: Yolov4 based on coco2014.
input size: 416*416
float ops: 60.1G
task: detection
framework: caffe
prune: 'no'
version: 2.0
files:
- name: dk_yolov4_coco_416_416_60.1G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=dk_yolov4_coco_416_416_60.1G_2.0.zip
checksum: a3c906bb24586b04970462c44ab579ae
- name: yolov4_leaky_spp_m
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov4_leaky_spp_m-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: a1dde0177b90c1c3ebeb7d3d682d579e
- name: yolov4_leaky_spp_m
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov4_leaky_spp_m-vck190-r2.0.0.tar.gz
checksum: 6702552304c9e3989f1b1da988311efb
- name: yolov4_leaky_spp_m
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov4_leaky_spp_m-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: 694be0fa4fc2388df738de68cdbc0d72
- name: yolov4_leaky_spp_m
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov4_leaky_spp_m-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: 93385fe630f2d60ba37330fdc7a56391
- name: yolov4_leaky_spp_m
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov4_leaky_spp_m-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: cb346e656697a3563b21135b84d9479d
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
<filename>modules/wb_leds/data/wb_leds_csr.yml
memory-map:
bus: wb-32-be
name: wb_leds_csr
description: LEDs Register Bank
children:
- reg:
name: leds
width: 32
access: rw
type: unsigned
|
# Technology Setup is done in hammer-tstech28-plugin/bwrc.yml.
vlsi.core.max_threads: 24
# File inputs
synthesis.inputs:
input_files: [
"src/HDC_Sensor_Fusion_FoldedRule90/hdc_sensor_fusion.sv",
"src/HDC_Sensor_Fusion_FoldedRule90/associative_memory.sv",
"src/HDC_Sensor_Fusion_FoldedRule90/spatial_encoder.sv",
"src/HDC_Sensor_Fusion_FoldedRule90/temporal_encoder.sv",
"src/HDC_Sensor_Fusion_FoldedRule90/fuser.sv",
"src/HDC_Sensor_Fusion_FoldedRule90/hv_binary_adder.sv",
"src/HDC_Sensor_Fusion_FoldedRule90/hv_generator.sv"
]
top_module: "hdc_sensor_fusion"
# General Hammer Inputs
vlsi.inputs.sram_parameters: "src/sram_inputs.json"
vlsi.inputs.sram_parameters_meta: ["transclude", "json2list"]
# Hammer will auto-generate a CPF for simple power designs; see hammer/src/hammer-vlsi/defaults.yml for more info
vlsi.inputs.power_spec_mode: "auto"
vlsi.inputs.power_spec_type: "cpf"
# Specify clock signals
vlsi.inputs.clocks: [
{name: "clk", period: "550ns", uncertainty: "0.1ns"}
]
vlsi.inputs.custom_sdc_constraints: [
"set_input_delay -clock clk 0 [all_inputs]",
"set_output_delay -clock clk 0 [all_outputs]"
]
# Generate Make include to aid in flow
vlsi.core.build_system: make
# Power Straps
par.power_straps_mode: generate
par.generate_power_straps_method: by_tracks
par.blockage_spacing: 2.0
par.generate_power_straps_options:
by_tracks:
strap_layers:
- M3
- M4
- M5
- M6
- M7
- M8
- M9
pin_layers:
- M9
track_width: 7 # minimum allowed for M2 & M3
track_spacing: 0
track_spacing_M3: 1 # to avoid M2 shorts at higher density
track_start: 10
power_utilization: 0.05
power_utilization_M8: 1.0
power_utilization_M9: 1.0
# Placement Constraints
vlsi.inputs.placement_constraints:
- path: "hdc_sensor_fusion"
type: toplevel
x: 0
y: 0
width: 400
height: 400
margins:
left: 0
right: 0
top: 0
bottom: 0
- path: "hdc_sensor_fusion/place_obs_bottom"
type: obstruction
obs_types: ["place"]
x: 0
y: 0
width: 400
height: 1.08 # 1 core site tall, necessary to avoid shorts
# VDD supply constraints
vlsi.inputs.supplies:
VDD: "0.80 V"
GND: "0 V"
vlsi.inputs.mmmc_corners: [
{
"name": "ss0p72v125c",
"type": "setup",
"voltage": "0.72 V",
"temp": "125 C"
},
{
"name": "ff0p88v0c",
"type": "hold",
"voltage": "0.88 V",
"temp": "0 C"
},
{
"name": "tt0p8v25c",
"type": "extra",
"voltage": "0.80 V",
"temp": "25 C"
}
]
# Pin placement constraints
vlsi.inputs.pin_mode: generated
vlsi.inputs.pin.generate_mode: semi_auto
vlsi.inputs.pin.assignments: [
{pins: "*", layers: ["M5", "M7"], side: "bottom"}
]
# SRAM compiler options
vlsi.core.sram_generator_tool: "sram_compiler"
vlsi.core.sram_generator_tool_path: ["hammer-tstech28-plugin"]
vlsi.core.sram_generator_tool_path_meta: "append"
|
<reponame>mfkiwl/neorv32
# Run the RISC-V riscv-arch-test test framework port to check current NEORV32 version
name: 'riscv-arch-test'
on:
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
suite:
- I
- C
- M
- privilege
- Zifencei
name: 'RISC-V Compatibility Check'
steps:
- name: '🧰 Repository Checkout'
uses: actions/checkout@v2
- name: '🔧 Setup Environment Variables'
run: |
echo "$GITHUB_WORKSPACE/riscv/bin" >> $GITHUB_PATH
echo $GITHUB_WORKSPACE
- name: '🔧 Setup RISC-V GCC and doit'
run: |
mkdir riscv
curl -fsSL https://github.com/stnolting/riscv-gcc-prebuilt/releases/download/rv32i-2.0.0/riscv32-unknown-elf.gcc-10.2.0.rv32i.ilp32.newlib.tar.gz | \
tar -xzf - -C riscv
ls -al riscv
pip3 install doit
- name: '🔧 Setup GHDL Simulator'
uses: ghdl/setup-ghdl-ci@nightly
with:
backend: gcc
- name: '🚧 Run RISC-V Architecture Tests'
run: ./do.py RunRISCVArchitectureTests -s ${{ matrix.suite }}
|
"top":
arch: "artix7_200t"
device_family: "xc7a200t"
device_name: "doblink-6-overlay"
device_speed: "sbg484-1"
device: "xc7a200t-doblink-6-overlay-virt"
board: "nexys_video"
use_roi: "FALSE"
timeout: 1600
|
# -*- yaml -*-
modulename: path/off/SRC_BASE_DIR/modulev1.sv
# Empty assignment means leave module out of project
bad_mod:
# Include other yaml files
include:
- chip/a10_substitutions.yml
- chip/s10_substitutions.yml
|
name: sv-test-env
channels:
- LiteX-Hub
- pkgw-forge
- conda-forge
dependencies:
# - LiteX-Hub::iverilog
# - LiteX-Hub::moore
# - LiteX-Hub::odin_ii
# - LiteX-Hub::slang
# - LiteX-Hub::surelog
# - LiteX-Hub::sv-parser
# - LiteX-Hub::tree-sitter-verilog
# - LiteX-Hub::uhdm-integration-verilator
# - LiteX-Hub::uhdm-integration-yosys
# - LiteX-Hub::verible
# - LiteX-Hub::verilator
# - LiteX-Hub::yosys
# - LiteX-Hub::antmicro-yosys-complete
# - LiteX-Hub::zachjs-sv2v
- ccache
- python=3.8
- pip
- pip: # Packages installed from PyPI
- -r file:requirements.txt
|
name: vitis-ai-optimizer_darknet
channels:
- conda-forge
- defaults
dependencies:
- python=3.6
- vai_optimizer_darknet_gpu
|
<reponame>Laegluin/mikrorechner<filename>.travis.yml
language: rust
matrix:
include:
- os: windows
rust: stable
env:
- DEPLOY=1
- os: windows
rust: beta
- os: windows
rust: nightly
- os: linux
rust: stable
env:
- DEPLOY=1
- os: linux
rust: beta
- os: linux
rust: nightly
- os: osx
rust: stable
env:
- DEPLOY=1
- os: osx
rust: beta
- os: osx
rust: nightly
allow_failures:
- rust: nightly
env:
global:
- RUST_BACKTRACE=1
cache:
directories:
- $HOME/.cargo
script:
- cargo test
before_deploy:
- if [[ "$TRAVIS_OS_NAME" = "windows" ]]; then choco install zip; fi
- cargo build --release
- mkdir -p target/release/laegluin.cml
- cp -r cmlc/package.json cmlc/language-configuration.json cmlc/grammars target/release/laegluin.cml
- cd target/release
- zip -r $TRAVIS_TAG-$TRAVIS_OS_NAME.zip simulator cmlc simulator.exe cmlc.exe laegluin.cml
- cd ../..
deploy:
provider: releases
name: $TRAVIS_TAG
api_key:
secure: <KEY>
on:
tags: true
condition: $DEPLOY = 1
skip_cleanup: true
file_glob: true
file: target/release/*.zip
|
<reponame>mfkiwl/snitch
axi/axi:
commit: v0.29.1
domain: [cluster, soc]
common_cells:
commit: v1.21.0
domain: [cluster, soc]
|
name: "Direct Push Warning"
on:
push:
branches:
- master
- release-**
jobs:
build:
runs-on: ubuntu-latest
if: github.repository_owner == 'NixOS'
env:
GITHUB_SHA: ${{ github.sha }}
GITHUB_REPOSITORY: ${{ github.repository }}
steps:
- name: Check if commit is a merge commit
id: ismerge
run: |
ISMERGE=$(curl -H 'Accept: application/vnd.github.groot-preview+json' -H "authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" https://api.github.com/repos/${{ env.GITHUB_REPOSITORY }}/commits/${{ env.GITHUB_SHA }}/pulls | jq -r '.[] | select(.merge_commit_sha == "${{ env.GITHUB_SHA }}") | any')
echo "::set-output name=ismerge::$ISMERGE"
      # github events are eventually consistent, so wait until changes propagate to their DB
- run: sleep 60
if: steps.ismerge.outputs.ismerge != 'true'
- name: Warn if the commit was a direct push
if: steps.ismerge.outputs.ismerge != 'true'
uses: peter-evans/commit-comment@v1
with:
body: |
@${{ github.actor }}, you pushed a commit directly to master/release branch
instead of going through a Pull Request.
That's highly discouraged beyond the few exceptions listed
on https://github.com/NixOS/nixpkgs/issues/118661
|
<filename>src_files.yml
udma_external_per:
files: [
rtl/udma_external_per_reg_if.sv,
rtl/udma_traffic_gen_rx.sv,
rtl/udma_traffic_gen_tx.sv,
rtl/udma_external_per_top.sv,
rtl/udma_external_per_wrapper.sv,
]
|
# Copyright 2021 ETH Zurich and University of Bologna.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
cache_root_dir: /usr/scratch2/dolent1/gitlabci/buildcache/mempool
artifacts:
tc-riscv-gcc:
inputs:
- Makefile
- toolchain/riscv-gnu-toolchain
outputs:
- install/riscv-gcc
tc-llvm:
inputs:
- Makefile
- toolchain/llvm-project
outputs:
- install/llvm
riscv-isa-sim:
inputs:
- Makefile
- toolchain/riscv-isa-sim
- toolchain/riscv-opcodes/encoding_out.h
outputs:
- install/riscv-isa-sim
verilator:
inputs:
- Makefile
- toolchain/verilator
outputs:
- install/verilator
verilator-model:
inputs:
- Makefile
- toolchain/verilator
- config/config.mk
- hardware/deps
- hardware/src
- hardware/tb
- hardware/Makefile
outputs:
- hardware/verilator_build/Vmempool_tb_verilator
halide:
inputs:
- Makefile
- toolchain/halide
outputs:
- install/halide
|
<filename>rtl/vendor/pulp_platform_register_interface/.github/workflows/lint.yml
# Copyright 2020 ETH Zurich and University of Bologna.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
# Run all lint checks
name: lint
on: [push, pull_request]
jobs:
################
# Verible Lint #
################
verilog:
name: Verilog Sources
# This job runs on Linux (fixed ubuntu version)
runs-on: ubuntu-18.04
env:
VERIBLE_VERSION: 0.0-807-g10e7c71
steps:
- uses: actions/checkout@v2
- name: Install Verible
run: |
set -e
mkdir -p build/verible
cd build/verible
curl -Ls -o verible.tar.gz https://github.com/google/verible/releases/download/v$VERIBLE_VERSION/verible-v$VERIBLE_VERSION-Ubuntu-18.04-bionic-x86_64.tar.gz
sudo mkdir -p /tools/verible && sudo chmod 777 /tools/verible
tar -C /tools/verible -xf verible.tar.gz --strip-components=1
echo "PATH=$PATH:/tools/verible/bin" >> $GITHUB_ENV
# Run linter in hw/ip subdir
- name: Run Lint
run: |
echo "::add-matcher::.github/verible-lint-matcher.json"
        find . -name "*.sv" | xargs verible-verilog-lint --waiver_files lint/verible.waiver
echo "::remove-matcher owner=verible-lint-matcher::"
#####################
# Vendor Up-to-Date #
#####################
# Check that all vendored sources are up-to-date.
check-vendor:
name: Vendor Up-to-Date
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.x
- name: Install requirements
run: pip install hjson
- name: Re-vendor and diff
run: |
find . \
-name '*.vendor.hjson' \
| xargs -n1 util/vendor.py --verbose \
&& git diff --exit-code
|
apiVersion: "v1"
kind: "Service"
metadata:
name: "xss-bot"
namespace: "default"
labels:
app: "xss-bot"
spec:
type: "LoadBalancer"
selector:
app: "xss-bot"
ports:
- protocol: "TCP"
port: 1
targetPort: 1337
|
<filename>.devcontainer/docker-compose.yml
version: '3.7'
services:
metalfs-dev:
image: metalfs/sdk-base:webpack
init: true
command: tail -f /dev/null
working_dir: /workspace
environment:
- DISPLAY=$DISPLAY
volumes:
- /tmp/.X11-unix:/tmp/.X11-unix
- ..:/workspace
devices:
- /dev/fuse
cap_add:
- SYS_ADMIN
- SYS_PTRACE
security_opt:
- apparmor:unconfined
- seccomp:unconfined
# mac_address: XX:XX:XX:XX:XX:XX
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.