Search is not available for this dataset
content
stringlengths 0
376M
|
---|
ssh:
user: iped
key: '~/.ssh/id_rsa_tofino'
port: 22
params:
rate: '{0.args.rate}'
pktgen_template: ./pktgen_template.lua
n_set_pkts: 5000
n_get_pkts: 45000
n_total_pkts: 50000
key_space: 10000
zipf_coef: 1.00
dirs:
home: /home/{0.ssh.user}
P4Boosters: '{0.dirs.home}/dcomp/P4Boosters'
init_cmds:
make_lua: >-
python format_lua_template.py {0.params.pktgen_template} {0.files.pktgen_script.src}
--rate {0.params.rate} --log {0.programs.log_dir}/pktgen.log --count {0.params.n_total_pkts}
make_pcap: >-
python2 {0.dirs.P4Boosters}/MemPacket/generate_memcached.py
--smac {0.hosts.pktgen.mac} --sip {0.hosts.pktgen.ip}
--dmac {0.hosts.mcd.mac} --dip {0.hosts.mcd.ip}
--out {0.files.test_pcap.src} --warmup-out {0.files.warmup_pcap.src}
--n-get {0.params.n_get_pkts} --n-set {0.params.n_set_pkts}
--key-space {0.params.key_space} --zipf {0.params.zipf_coef}
post_cmds:
verify_counters: >-
python verify_counters.py {0.out}/moongen/moongen.out {0.out}/counters_0.out {0.out}/counters_1.out
files:
pktgen_script:
src: '{0.out}/pktgen.lua'
dst: '{0.programs.log_dir}/pktgen.lua'
host: pktgen
moongen_script:
src: './capture.lua'
dst: '{0.programs.log_dir}/capture.lua'
host: moongen
test_pcap:
src: '{0.out}/test_pkts.pcap'
dst: '{0.programs.log_dir}/test_pkts.pcap'
host: pktgen
warmup_pcap:
src: '{0.out}/warmup_pkts.pcap'
dst: '{0.programs.log_dir}/warmup_pkts.pcap'
host: pktgen
pktgen_runner:
src: './run_lua.sh'
dst: '~/dcomp/dpdk/run_lua.sh'
host: pktgen
hosts:
fpga_prog:
addr: tclust2
pktgen:
addr: tclust7
mac: 00:02:c9:3a:84:00
ip: 10.0.0.7
mcd:
addr: tclust4
mac: 7c:fe:90:1c:36:81
ip: 10.0.0.4
moongen:
addr: tclust5
tofino:
addr: 172.16.58.3
ssh:
user: fpadmin
key: '~/.ssh/id_rsa_tofino'
port: 22
programs:
log_dir: ~/logs/{0.label}
program_fpga:
host: fpga_prog
start: cd ~/dcomp/P4Boosters/FPGA/TopLevelSDx && bash program_memcached.sh
check_rtn: 0
log:
dir: fpga
out: program_memcached.out
err: program_memcached.err
start_dataplane:
host: tofino
start: cd ~/gits/TofinoP4Boosters/flightplanE2e && ./run.sh -t -mi 7 -me 7 {0.args.dataplane_flags}
stop: cd ~/gits/TofinoP4Boosters/flightplanE2e && ./stop.sh
enforce_duration: false
stop_dataplane:
host: tofino
start: cd ~/gits/TofinoP4Boosters/flightplanE2e && ./stop.sh
check_rtn: false
moongen:
host: moongen
start: 'cd ~/dcomp/MoonGen/ && ./build/MoonGen {0.files.moongen_script.dst} 1 {log} -s100'
stop: pkill MoonGen
log:
dir: moongen
log: moongen.pcap
out: moongen.out
err: moongen.err
mcd:
host: mcd
start: 'memcached -U 11211 -l {0.hosts.mcd.ip} --protocol=ascii'
stop: 'pkill memcached'
log:
out: memcached.out
err: memcached.err
tcpreplay:
host: pktgen
start: '~/bin/tcpreplay -p {rate} -i ens3 {pcap}'
log:
dir: tcpreplay
out: tcpreplay.out
err: tcpreplay.err
pktgen:
host: pktgen
start: tmux new -d '~/dcomp/dpdk/run_lua.sh {lua} {pcap}'
enforce_duration: false
log:
out: pktgen.out
err: pktgen.err
get_counters:
host: tofino
start: >-
tmux send ucli ENTER && sleep 1 &&
tmux send pm ENTER && sleep 1 &&
tmux send show ENTER && sleep 1 &&
tmux capture-pane -J -p
log:
out: counters_{i}.out
err: counters_{i}.err
commands:
stop_dataplane:
begin: -60
program_fpga:
begin: -59
start_dataplane:
begin: -30
duration: 75
mcd:
begin: -20
duration: 65
tcpreplay:
begin: 0
pcap: '{0.files.warmup_pcap.dst}'
rate: 10000
moongen:
begin: 5
duration: 35
pktgen:
begin: 15
lua: '{0.files.pktgen_script.dst}'
pcap: '{0.files.test_pcap.dst}'
duration: 30
get_counters:
- begin: 10
- begin: 40
|
name: Test and Publish
on:
  push:
    branches:
      - main
jobs:
  checks:
    uses: ./.github/workflows/checks.yml
  # the jobs below are taken from the actions in https://github.com/relekang/python-semantic-release under MIT License
  beautify:
    name: Beautify
    runs-on: ubuntu-latest
    concurrency: push
    needs: [checks]
    outputs:
      new_sha: ${{ steps.sha.outputs.SHA }}
    steps:
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          # quoted so YAML cannot mis-type the version as a float
          # (e.g. unquoted 3.10 would parse as 3.1)
          python-version: "3.9"
      - name: Checkout repository
        uses: actions/checkout@v2
      - name: Install Black
        run: python -m pip install black
      - name: Beautify with Black
        run: python -m black .
      - name: Install isort
        run: python -m pip install isort
      - name: Sort imports
        run: python -m isort --profile black .
      - name: Commit and push changes
        uses: github-actions-x/[email protected]
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          commit-message: 'style: beautify ${{ github.sha }}'
          name: github-actions
          email: <EMAIL>
      - name: Get new SHA
        id: sha
        run: |
          new_sha=$(git rev-parse HEAD)
          # "::set-output" is deprecated and disabled on current runners;
          # write the step output to $GITHUB_OUTPUT instead.
          echo "SHA=$new_sha" >> "$GITHUB_OUTPUT"
  semantic-release:
    name: Bump Version and Release
    runs-on: ubuntu-latest
    concurrency: push
    needs: [checks, beautify]
    outputs:
      new_version: ${{ steps.version.outputs.VERSION }}
      new_sha: ${{ steps.sha.outputs.SHA }}
    steps:
      - uses: actions/checkout@v2
        with:
          # full history is required for semantic-release version calculation
          fetch-depth: 0
          ref: ${{ needs.beautify.outputs.new_sha }}
      - name: install relekang/python-semantic-release
        run: python3 -m pip install python-semantic-release
      - name: Semantic Release
        uses: relekang/python-semantic-release@master
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          repository_username: __token__
          repository_password: ${{ secrets.PYPI_PROJECT_API_TOKEN }}
|
trigger:
branches:
include:
- '*'
tags:
include:
- '*'
resources:
repositories:
- repository: abs-tudelft
type: github
endpoint: github
name: abs-tudelft/azure-pipelines-templates
jobs:
- job:
pool:
vmImage: ubuntu-latest
container: abstudelft/ghdl-gcc-python:latest
steps:
- script: |
python3 setup.py build
displayName: Build
- script: |
python3 setup.py test
displayName: Test
- script: |
python3 setup.py lint
displayName: Lint
- script: |
python3 -m pip install --user pyyaml markdown2
make examples
displayName: Examples
- script: |
make doc
displayName: Doc
- script: |
python3 setup.py bdist_wheel
displayName: Wheel
- task: PublishTestResults@2
inputs:
testResultsFiles: '**/nosetests.xml'
- task: UseDotNet@2
inputs:
version: 2.x
- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: cobertura
summaryFileLocation: '$(System.DefaultWorkingDirectory)/**/coverage.xml'
- script: |
bash <(curl https://codecov.io/bash) -f coverage.xml
env:
CODECOV_TOKEN: $(codecov)
displayName: codecov
- script: |
pip3 install --user twine
condition: and(succeeded(), startsWith(variables['Build.SourceBranch'], 'refs/tags/'))
displayName: Install twine
- task: TwineAuthenticate@0
inputs:
externalFeeds: 'pypi'
condition: and(succeeded(), startsWith(variables['Build.SourceBranch'], 'refs/tags/'))
displayName: Authenticate Twine
- script: |
python3 -m twine upload -r pypi --config-file $(PYPIRC_PATH) dist/*
condition: and(succeeded(), startsWith(variables['Build.SourceBranch'], 'refs/tags/'))
displayName: Publish to PyPi
- task: GitHubRelease@0
inputs:
gitHubConnection: github
repositoryName: abs-tudelft/vhdmmio
action: edit
tag: $(Build.SourceBranchName)
title: $(Build.SourceBranchName)
assets: $(System.DefaultWorkingDirectory)/dist/*.whl
addChangeLog: true
condition: and(succeeded(), startsWith(variables['Build.SourceBranch'], 'refs/tags/'))
displayName: Publish to GitHub
- template: jobs/mdbook-gh-pages.yml@abs-tudelft
parameters:
source: doc
output: doc/html
|
<filename>src_files.yml
# IP source-file manifest for the axi2mem AXI-to-TCDM bridge
axi2mem:
  files: [
    axi2mem_busy_unit.sv,
    axi2mem_rd_channel.sv,
    axi2mem.sv,
    axi2mem_tcdm_rd_if.sv,
    axi2mem_tcdm_synch.sv,
    axi2mem_tcdm_unit.sv,
    axi2mem_tcdm_wr_if.sv,
    axi2mem_trans_unit.sv,
    axi2mem_wr_channel.sv,
  ]
|
<gh_stars>0
name: Chisel-test
on: [push]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Cache SBT
uses: actions/cache@v2
with:
path: |
~/.ivy2/cache
~/.sbt
key: ${{ runner.os }}-sbt-${{ hashFiles('**/build.sbt') }}
- name: Cache verilator
id: cache-verilator
uses: actions/cache@v2
with:
path: |
~/ccache
~/verilator-4.210
~/tools/ccache
~/tools/verilator
key: ${{ runner.os }}-verilator
- name: Build and Install Verilator
if: steps.cache-verilator.outputs.cache-hit != 'true'
run: |
sudo apt-get install git perl python3 make autoconf g++ flex bison cmake
sudo apt-get install libfl2 libfl-dev
sudo apt-get install zlibc zlib1g zlib1g-dev
sudo apt-get install libzstd-dev libhiredis-dev
git clone https://github.com/ccache/ccache.git
cd ccache
git checkout -b v4.3 refs/tags/v4.3
mkdir build && cd build
cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/home/runner/tools/ccache ..
make -j && make install
export PATH=/home/runner/tools/ccache/bin:$PATH
cd ../../
git clone https://github.com/verilator/verilator.git
cd verilator
git checkout -b v4.210 refs/tags/v4.210
autoconf
./configure --prefix=/home/runner/tools/verilator
make -j 3
make install
- name: Cache WLA DX
id: cache-wla-dx
uses: actions/cache@v2
with:
path: |
~/wla-dx
~/tools/wla-dx
key: ${{ runner.os }}-wla-dx
- name: Build and Install WLA DX
if: steps.cache-wla-dx.outputs.cache-hit != 'true'
run: |
sudo apt install cmake
git clone https://github.com/vhelin/wla-dx
cd wla-dx
git checkout -b v10.0 refs/tags/v10.0
mkdir build && cd build # Create building directory
cmake .. -DCMAKE_INSTALL_PREFIX=/home/runner/tools/wla-dx
cmake --build . --config Release
cmake -P cmake_install.cmake
- name: Cache sbt
id: cache-sbt
uses: actions/cache@v2
with:
path: |
~/sbt-1.5.5.tgz
~/tools/sbt
key: ${{ runner.os }}-sbt
- name: Install sbt
if: steps.cache-sbt.outputs.cache-hit != 'true'
run: |
wget https://github.com/sbt/sbt/releases/download/v1.5.5/sbt-1.5.5.tgz
tar xf sbt-1.5.5.tgz
mv sbt tools
- name: Cache test data
id: cache-test-data
uses: actions/cache@v2
with:
path: |
~/work/chisel-dmg/chisel-dmg/src/test/resources/cpu/*.gb.hex*
key: ${{ runner.os }}-sbt-${{ hashFiles('**/*.s') }}
- name: Build test data
if: steps.cache-test-data.outputs.cache-hit != 'true'
run: |
export PATH=/home/runner/tools/sbt/bin:/home/runner/tools/wla-dx/bin:$PATH
sbt "convertBin2Hex"
- name: Run test
run: |
export PATH=/home/runner/tools/sbt/bin:/home/runner/tools/ccache/bin:/home/runner/tools/verilator/bin:/home/runner/tools/wla-dx/bin:$PATH
sbt "testOnly CpuTest -- -P2"
|
# IP source-file manifest for the axi_node interconnect
axi_node:
  incdirs: [
    .,
  ]
  files: [
    apb_regs_top.sv,
    axi_address_decoder_AR.sv,
    axi_address_decoder_AW.sv,
    axi_address_decoder_BR.sv,
    axi_address_decoder_BW.sv,
    axi_address_decoder_DW.sv,
    axi_AR_allocator.sv,
    axi_ArbitrationTree.sv,
    axi_AW_allocator.sv,
    axi_BR_allocator.sv,
    axi_BW_allocator.sv,
    axi_DW_allocator.sv,
    axi_FanInPrimitive_Req.sv,
    axi_multiplexer.sv,
    axi_node.sv,
    axi_node_wrap.sv,
    axi_node_wrap_with_slices.sv,
    axi_regs_top.sv,
    axi_request_block.sv,
    axi_response_block.sv,
    axi_RR_Flag_Req.sv,
  ]
|
<gh_stars>10-100
---
# Pandoc-style document metadata block
title: VGA Chip
author:
  - Felix "xq" Queißner
date: May 07, 2020
abstract: The VGA Chip provides a framebuffer with 256×128 pixels output.
|
# RISC-V core configuration: 64-bit, M/S privilege modes
config:
  - xlen: 64
  - supportISA: [System, ZiCsr, RV64I, RV64M, RV64F, RV64D, RV64Q, RV64Zfh, RV64A]
  - supportMode: [M, S]
|
<gh_stars>1-10
GitBase: ..
TopRoguePackage: ucsc_hn
RoguePackages:
- python
RogueScripts:
- ../software/scripts/renaGui
CondaDependencies:
- surf
- rce_gen3_fw_lib
RogueConfig:
Targets:
MultiRena:
ImageDir: targets/MultiRena/images
Extensions:
- bit
SlaveRena:
ImageDir: targets/SlaveRena/images
Extensions:
- bit
DefaultBoot:
ImageDir: targets/DefaultBoot/images
Extensions:
- bit
Releases:
MultiRena:
Targets:
- MultiRena
- SlaveRena
- DefaultBoot
Types:
- Rogue
LibDir: ../software/lib/
|
# Source-file manifest; *_xilinx / *_rtl variants select the proper
# clock-mux implementation per synthesis target.
components:
  incdirs: [
    ../includes,
  ]
  files: [
    apb_clkdiv.sv,
    apb_timer_unit.sv,
    apb_soc_ctrl.sv,
    counter.sv,
    memory_models.sv,
    pulp_interfaces.sv,
    axi_slice_dc_master_wrap.sv,
    axi_slice_dc_slave_wrap.sv,
    glitch_free_clk_mux.sv,
    scm_2048x32.sv,
    scm_512x32.sv,
  ]
components_xilinx:
  incdirs: [
    ../includes,
  ]
  targets: [
    xilinx,
  ]
  files: [
    glitch_free_clk_mux_xilinx.sv,
  ]
components_rtl:
  incdirs: [
    ../includes,
  ]
  targets: [
    rtl,
  ]
  files: [
    glitch_free_clk_mux.sv,
  ]
components_behav:
  incdirs: [
    ../includes,
  ]
  targets: [
    rtl,
  ]
  files: [
    freq_meter.sv,
  ]
  flags: [
    skip_synthesis,
  ]
|
<filename>tests/vivado_backend_tests/projects/2/hls4ml_config.yml<gh_stars>1-10
Backend: Vivado
ClockPeriod: 5
HLSConfig:
LayerName:
layer1:
Precision:
bias: ap_fixed<7,1>
weight: ap_fixed<7,1>
ReuseFactor: 1
layer1_input:
Precision:
result: ap_fixed<22,2>
layer2:
Precision:
bias: ap_fixed<7,1>
weight: ap_fixed<7,1>
ReuseFactor: 1
layer3:
Precision:
bias: ap_fixed<7,1>
weight: ap_fixed<7,1>
ReuseFactor: 1
relu1:
Precision:
result: ap_fixed<7,1>
ReuseFactor: 1
relu2:
Precision:
result: ap_fixed<7,1>
ReuseFactor: 1
softmax_layer:
Precision: ap_fixed<34,10>
ReuseFactor: 1
exp_table_t: ap_fixed<18,8>
inv_table_t: ap_fixed<18,4>
table_size: 1024
Model:
Precision: ap_fixed<16,6>
ReuseFactor: 1
Strategy: Latency
IOType: io_parallel
KerasModel: !keras_model 'model_2/hls4ml_prj/keras_model.h5'
OutputDir: model_2/hls4ml_prj
ProjectName: myproject
Stamp: F5F1dCAB
XilinxPart: xcu250-figd2104-2L-e
|
<reponame>anmeza/platform_ml_models
AcceleratorConfig:
Driver: python
Interface: axi_stream
Precision:
Input: float
Output: float
Backend: VivadoAccelerator
Board: pynq-z2
ClockPeriod: 10
HLSConfig:
LayerName:
flatten:
StreamDepth: 10000
in_local:
StreamDepth: 10000
input_1:
Precision: ap_ufixed<8,0>
ReuseFactor: 16384
StreamDepth: 10000
Trace: false
out_local:
StreamDepth: 10000
q_activation:
Precision:
default: ap_fixed<9,6>
result: ap_fixed<8,3,AP_RND,AP_SAT>
ReuseFactor: 16384
StreamDepth: 10000
Trace: false
q_activation_1:
Precision:
default: ap_fixed<9,6>
result: ap_fixed<8,3,AP_RND,AP_SAT>
ReuseFactor: 16384
StreamDepth: 10000
Trace: false
q_activation_2:
Precision:
default: ap_fixed<9,6>
result: ap_fixed<8,3,AP_RND,AP_SAT>
ReuseFactor: 16384
StreamDepth: 10000
Trace: false
q_activation_3:
Precision:
default: ap_fixed<9,6>
result: ap_fixed<8,3,AP_RND,AP_SAT>
ReuseFactor: 16384
StreamDepth: 10000
Trace: false
q_activation_4:
Precision:
default: ap_fixed<9,6>
result: ap_fixed<8,3,AP_RND,AP_SAT>
q_activation_5:
Precision: ap_fixed<8,6>
ReuseFactor: 16384
StreamDepth: 10000
Trace: false
q_conv2d_batchnorm:
ConvImplementation: LineBuffer
Precision:
bias: ap_fixed<8,3>
default: ap_fixed<9,6>
result: ap_fixed<9,6>
weight: ap_fixed<8,3>
ReuseFactor: 16384
StreamDepth: 10000
Trace: false
accum_t: ap_fixed<14,6>
q_conv2d_batchnorm_1:
ConvImplementation: LineBuffer
Precision:
bias: ap_fixed<8,3>
default: ap_fixed<9,6>
result: ap_fixed<9,6>
weight: ap_fixed<8,3>
ReuseFactor: 16384
StreamDepth: 10000
Trace: false
accum_t: ap_fixed<14,6>
q_conv2d_batchnorm_1_linear:
Precision:
default: ap_fixed<9,6>
result: ap_fixed<9,6>
q_conv2d_batchnorm_2:
ConvImplementation: LineBuffer
Precision:
bias: ap_fixed<8,3>
default: ap_fixed<9,6>
result: ap_fixed<9,6>
weight: ap_fixed<8,3>
ReuseFactor: 16384
StreamDepth: 10000
Trace: false
accum_t: ap_fixed<14,6>
q_conv2d_batchnorm_2_linear:
Precision:
default: ap_fixed<9,6>
result: ap_fixed<9,6>
q_conv2d_batchnorm_3:
ConvImplementation: LineBuffer
Precision:
bias: ap_fixed<8,3>
default: ap_fixed<9,6>
result: ap_fixed<9,6>
weight: ap_fixed<8,3>
ReuseFactor: 16384
StreamDepth: 10000
Trace: false
accum_t: ap_fixed<14,6>
q_conv2d_batchnorm_3_linear:
Precision:
default: ap_fixed<9,6>
result: ap_fixed<9,6>
q_conv2d_batchnorm_4:
ConvImplementation: LineBuffer
Precision:
bias: ap_fixed<8,3>
default: ap_fixed<9,6>
result: ap_fixed<9,6>
weight: ap_fixed<8,3>
ReuseFactor: 16384
StreamDepth: 10000
Trace: false
accum_t: ap_fixed<14,6>
q_conv2d_batchnorm_4_linear:
Precision:
default: ap_fixed<9,6>
result: ap_fixed<9,6>
q_conv2d_batchnorm_linear:
Precision:
default: ap_fixed<9,6>
result: ap_fixed<9,6>
q_dense:
Precision:
bias: ap_fixed<8,3>
default: ap_fixed<8,6>
result: ap_fixed<8,6>
weight: ap_fixed<8,3>
ReuseFactor: 16384
StreamDepth: 10000
Trace: false
accum_t: ap_fixed<12,6>
zp2d_q_conv2d_batchnorm_1:
StreamDepth: 10000
zp2d_q_conv2d_batchnorm_2:
StreamDepth: 10000
zp2d_q_conv2d_batchnorm_3:
StreamDepth: 10000
zp2d_q_conv2d_batchnorm_4:
StreamDepth: 10000
Model:
FIFO_opt: 1
Precision: ap_fixed<8,6>
ReuseFactor: 16384
Strategy: Resource
SkipOptimizers:
- reshape_stream
IOType: io_stream
InputData: resnet_v1_eembc_quantized_tiny2/X_test.npy
KerasModel: !keras_model 'my-hls-test-quantized-tiny2/keras_model.h5'
OutputDir: my-hls-test-quantized-tiny2
OutputPredictions: resnet_v1_eembc_quantized_tiny2/y_test.npy
ProjectName: myproject
Stamp: 91b5812e
XilinxPart: xc7z020clg400-1
|
<gh_stars>1-10
# Copyright 2018-2021 The University of Birmingham
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
include:
- project: 'hog/Hog'
file: '/hog.yml'
ref: 'v0.2.1'
#################### example ###########################
### Change 'example' with your project name
GEN:example:
extends: .generate_project
variables:
extends: .vars
PROJECT_NAME: example
HOG_ONLY_SYNTH: 0 # if 1 runs only the synthesis
SIM:example:
extends: .simulate_project
variables:
extends: .vars
PROJECT_NAME: example
|
<reponame>pulp-platform/hypnos
# Source-file manifest for the hypnos HD-computing accelerator
hypnos:
  incdirs:
    - rtl/spi_module
  files:
    - src/common/pkg_common.sv
    - src/hd_accelerator/hd_encoder/mixer/pkg_mixer_permutate.sv
    - src/hd_accelerator/hd_encoder/mixer/mixer.sv
    - src/hd_accelerator/hd_encoder/man_module/unary_encoder/unary_encoder.sv
    - src/hd_accelerator/hd_encoder/man_module/pkg_perm_final.sv
    - src/hd_accelerator/hd_encoder/man_module/man_module.sv
    - src/hd_accelerator/hd_encoder/hd_unit/pkg_hd_unit.sv
    - src/hd_accelerator/hd_encoder/hd_unit/hd_unit.sv
    - src/hd_accelerator/hd_encoder/pkg_hd_encoder.sv
    - src/hd_accelerator/hd_encoder/hd_encoder.sv
    - src/hd_accelerator/hd_memory/pkg_hd_memory.sv
    - src/hd_accelerator/hd_memory/hd_memory.sv
    - src/hd_accelerator/ucode_sequencer/config_unit/pkg_memory_mapping.sv
    - src/hd_accelerator/ucode_sequencer/ucode_decoder/pkg_ucode_decoder.sv
    - src/hd_accelerator/ucode_sequencer/pkg_ucode_sequencer.sv
    - src/hd_accelerator/ucode_sequencer/config_unit/config_unit.sv
    - src/hd_accelerator/ucode_sequencer/pc_hw_loop_mod/pc_hwl_mod.sv
    - src/hd_accelerator/ucode_sequencer/ucode_decoder/ucode_decoder.sv
    - src/common/scm_1rw.sv
    - src/hd_accelerator/ucode_sequencer/ucode_sequencer.sv
    - src/hd_accelerator/shared_memory_interface/shared_memory_interface.sv
    - src/hd_accelerator/hd_accelerator.sv
    - src/cfg_iface_adapter/cfg_iface.sv
    - src/spi_module/wakeupspi_spim_ctrl.sv
    - src/spi_module/wakeupspi_spim_txrx.sv
    - src/spi_module/spi_top.sv
    - src/preprocessor/lbp.sv
    - src/preprocessor/preprocessor_top.sv
    - src/hypnos.sv
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: 'Person re-identification model (backbone: resnet18).'
input size: 176*80
float ops: 1.1G
task: person reid
framework: pytorch
prune: 'no'
version: 1.4
files:
- name: pt_personreid-res18_market1501_176_80_1.1G_1.4
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_personreid-res18_market1501_176_80_1.1G_1.4.zip
checksum: e33cf512ccd3a738039e572e4a14c27f
- name: personreid-res18_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=personreid-res18_pt-zcu102_zcu104_kv260-r1.4.0.tar.gz
checksum: a7ab9bae0f9bdb97bef029991628b8f4
- name: personreid-res18_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=personreid-res18_pt-vck190-r1.4.0.tar.gz
checksum: 0c211456d8a87cbe96919ef0d2e82b46
- name: personreid-res18_pt
type: xmodel
board: u50-DPUCAHX8H & u50lv-DPUCAHX8H & u280-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=personreid-res18_pt-u50-u50lv-u280-DPUCAHX8H-r1.4.0.tar.gz
checksum: 33181116939c3b1c72fac80ef079653c
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
# Source-file manifest for the CV32E40P core and its register-file /
# verification-IP variants.
cv32e40p_regfile_rtl:
  targets: [
    rtl,
    tsmc55,
    gf22,
  ]
  incdirs: [
    ./rtl/include,
  ]
  files: [
    ./rtl/cv32e40p_register_file_test_wrap.sv,
    ./rtl/cv32e40p_register_file_latch.sv,
  ]
cv32e40p:
  vlog_opts: [
    -L fpnew_lib,
  ]
  incdirs: [
    ./rtl/include,
    ../../rtl/includes,
  ]
  files: [
    ./rtl/include/cv32e40p_apu_core_pkg.sv,
    ./rtl/include/cv32e40p_pkg.sv,
    ./bhv/include/cv32e40p_tracer_pkg.sv,
    ./rtl/cv32e40p_alu.sv,
    ./rtl/cv32e40p_alu_div.sv,
    ./rtl/cv32e40p_ff_one.sv,
    ./rtl/cv32e40p_popcnt.sv,
    ./rtl/cv32e40p_compressed_decoder.sv,
    ./rtl/cv32e40p_controller.sv,
    ./rtl/cv32e40p_cs_registers.sv,
    ./rtl/cv32e40p_decoder.sv,
    ./rtl/cv32e40p_int_controller.sv,
    ./rtl/cv32e40p_ex_stage.sv,
    ./rtl/cv32e40p_hwloop_regs.sv,
    ./rtl/cv32e40p_id_stage.sv,
    ./rtl/cv32e40p_if_stage.sv,
    ./rtl/cv32e40p_load_store_unit.sv,
    ./rtl/cv32e40p_mult.sv,
    ./rtl/cv32e40p_prefetch_buffer.sv,
    ./rtl/cv32e40p_prefetch_controller.sv,
    ./rtl/cv32e40p_obi_interface.sv,
    ./rtl/cv32e40p_aligner.sv,
    ./rtl/cv32e40p_sleep_unit.sv,
    ./rtl/cv32e40p_core.sv,
    ./rtl/cv32e40p_apu_disp.sv,
    ./rtl/cv32e40p_fifo.sv,
    ./rtl/cv32e40p_pmp.sv,
  ]
cv32e40p_vip_rtl:
  targets: [
    rtl,
  ]
  incdirs: [
    ./rtl/include,
  ]
  files: [
    ./bhv/cv32e40p_sim_clock_gate.sv,
    ./bhv/cv32e40p_wrapper.sv,
    ./bhv/cv32e40p_tracer.sv,
    ./bhv/cv32e40p_core_log.sv,
    ./bhv/cv32e40p_apu_tracer.sv,
  ]
  defines: [
    CV32E40P_TRACE_EXECUTION,
    CV32E40P_APU_TRACE
  ]
  flags: [
    skip_synthesis,
  ]
# NOTE: a second, byte-identical `cv32e40p_regfile_rtl` entry was removed
# here; duplicate mapping keys are invalid YAML (most parsers silently
# keep only the last occurrence).
cv32e40p_regfile_verilator:
  targets: [
    verilator,
  ]
  files: [
    ./rtl/cv32e40p_register_file_ff.sv,
  ]
cv32e40p_regfile_fpga:
  targets: [
    xilinx,
  ]
  incdirs: [
    ./rtl/include,
  ]
  files: [
    ./rtl/cv32e40p_register_file_test_wrap.sv,
    ./rtl/cv32e40p_register_file_ff.sv,
  ]
|
<filename>vsg_files.yaml
# VHDL files checked by VHDL Style Guide (vsg)
file_list:
  - common/source/clk_div_n.vhd
  - common/sim/clk_div_n_long_code_tb.vhd
  - common/sim/clk_div_n_stimulus_array_tb.vhd
  - common/sim/clk_div_n_stimulus_file_tb.vhd
  - common/sim/clk_div_n_vunit_tb.vhd
  - demo/impl1/source/fpga_demo.vhd
|
name: 'test'
on:
push:
pull_request:
schedule:
- cron: '0 0 * * 5'
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- run: docker pull ghdl/vunit:llvm
- run: docker run -t -v $PWD:/build -w /build ghdl/vunit:llvm bash -c "make GNATMAKE='gnatmake -j'$(nproc)"
py:
needs: [build]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- run: docker run -t -v $PWD:/build -w /build ghdl/vunit:llvm bash -c "apt update && apt install -y python3-pexpect && make -j$(nproc) test_micropython test_micropython_long"
test:
needs: [build]
strategy:
fail-fast: false
max-parallel: 3
matrix:
task: [
"tests_unit",
"tests_console",
"{1..99}",
"{100..199}",
"{200..299}",
"{300..399}",
"{400..499}",
"{500..599}",
"{600..699}",
"{700..799}",
"{800..899}",
"{900..999}",
]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- run: docker run -t -v $PWD:/build -w /build ghdl/vunit:llvm bash -c "make -j$(nproc) ${{ matrix.task }}"
symbiflow:
strategy:
fail-fast: false
max-parallel: 2
matrix:
task: [ ECP5-EVN, ORANGE-CRAB ]
runs-on: ubuntu-latest
env:
DOCKER: 1
SYNTH_ECP5_FLAGS: -noflatten
FPGA_TARGET: ${{matrix.task}}
steps:
- uses: actions/checkout@v2
- run: make microwatt.json
- run: make microwatt.bit
- run: make microwatt.svf
- uses: actions/upload-artifact@v2
with:
name: ${{matrix.task}}-bitstream
path: microwatt.svf
# test building verilog target from yosys/nextpnr
verilog:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- run: make DOCKER=1 microwatt.v
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: mobilenetv1_0.25 classifier on ImageNet.
input size: 128*128
float ops: 27M
task: classification
framework: tensorflow
prune: 'no'
version: 2.0
files:
- name: tf_mobilenetv1_0.25_imagenet_128_128_27M_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=tf_mobilenetv1_0.25_imagenet_128_128_27M_2.0.zip
checksum: 4fd87171d66dc1e378d981de9852ca39
- name: mobilenet_v1_0_25_128_tf
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=mobilenet_v1_0_25_128_tf-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: 50cb7fca14392023169b9761c390dbd3
- name: mobilenet_v1_0_25_128_tf
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=mobilenet_v1_0_25_128_tf-vck190-r2.0.0.tar.gz
checksum: ad44bcdb2c2a4d68cc6a6c84ea12ba61
- name: mobilenet_v1_0_25_128_tf
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=mobilenet_v1_0_25_128_tf-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: dd6acb85933a3eda5e102f78d0cf7e72
- name: mobilenet_v1_0_25_128_tf
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=mobilenet_v1_0_25_128_tf-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: b83907c76fe9187f7433ba34f685583f
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
before_script:
  # Synopsys license servers; quoted because the value contains `:` pairs
  - export SNPSLMD_LICENSE_FILE="1711@sac-lic-31:1711@sc-lic-32:1711@lic-34:1711@sc-lic-33:1711@sc-lic-14"
stages:
  - test
regress:
  stage: test
  script:
    - make USE_VM_ENV=1
    - /home/bin/ci_sanity -tot `pwd`
|
name: Test
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
test:
name: Test
runs-on: ubuntu-latest
steps:
- name: Wit Init
uses: sifive/wit/actions/[email protected]
with:
additional_packages: [email protected]:sifive/environment-blockci-sifive.git::0.7.1
- name: Run pioDUT simulation
uses: sifive/environment-blockci-sifive/actions/[email protected]
with:
command: -x 'runSim pioDUT'
- name: Run 16-pin pioDUT simulation
uses: sifive/environment-blockci-sifive/actions/[email protected]
with:
command: -x 'runSim pio16DUT'
|
---
# Caravel MPW project descriptor
project:
  description: "A SoC with two crypto-supported RISC-V cores"
  foundry: "SkyWater"
  git_url: "https://github.com/phthinh/soric_project.git"
  organization: ""
  organization_url: ""
  owner: "<NAME> - <NAME> - <NAME>"
  process: "SKY130A"
  project_name: "SORIC"
  project_id: "321"
  tags:
    - "Open MPW"
  category: "SORIC Caravel User Project"
  top_level_netlist: "caravel/verilog/gl/caravel.v"
  user_level_netlist: "verilog/gl/user_project_wrapper.v"
  version: "1.00"
  cover_image: "bin/soric.png"
|
<reponame>hprice99/ENGG4811_code
---
# Fox NoC build-time configuration
networkRows: 3
networkCols: 2
foxNetworkStages: 2
foxNodeFifos:
  peToNetwork: 8
  networkToPe: 1024
resultNodeCoord:
  x: 0
  y: 0
resultNodeFifos:
  peToNetwork: 8
  networkToPe: 1024
romNodeCoord:
  x: 0
  y: 2
resultUartFifoDepth: 1024
# Bit-field widths of the on-network packet format
packetFormat:
  multicastGroupBits: 1
  multicastCoordBits: 1
  readyFlagBits: 1
  resultFlagBits: 1
  matrixTypeBits: 1
  matrixCoordBits: 8
  matrixElementBits: 32
# canonical lowercase booleans (YAML 1.2); `True`/`False` rely on
# YAML 1.1 implicit typing
useMatrixInitFile: true
multicastAvailable: false
|
<gh_stars>10-100
#=========================================================================
# Synopsys PrimePower -- Gate-Level Power Estimation
#=========================================================================
# This step runs gate-level average power analysis with Synopsys PrimeTime PX
# or PrimePower, the successor of the PrimeTime PX feature
#
# Both average and time-base power analysis can be done
# Either gate-level or rtl switching activities can be used
#
# Author : <NAME>
# Date : 05.03.2021
#
name: synopsys-pt-power
#-------------------------------------------------------------------------
# Inputs and Outputs
#-------------------------------------------------------------------------
inputs:
  - adk
  - design.vcs.v
  - design.pt.sdc
  - design.spef.gz
  - run.saif
  - run.vcd
  - design.namemap
outputs:
  - primetime.session
  - design.sdf
#-------------------------------------------------------------------------
# Commands
#-------------------------------------------------------------------------
commands:
  - source run.sh
debug:
  - source debug.sh
#-------------------------------------------------------------------------
# Parameters
#-------------------------------------------------------------------------
parameters:
  design_name: undefined
  # The strip path/saif_instance must be defined, without any quotations!
  saif_instance: th/dut
  # analysis mode either "averaged" or "time_based"
  analysis_mode: averaged
  # If VCD activity files is the result of a zero-delay simulation, it needs
  # to be specified! (Otherwise SDF simulation will be assumed)
  # canonical lowercase boolean (YAML 1.2)
  zero_delay_simulation: false
  # Set library operating condition to be used for timing and power estimation
  # Leaving it undefined PrimeTime will choose the operating condition of the first target library in the list
  # eg. the Operating condition of the FreePDK45 ADK can be set "typical"
  lib_op_condition: undefined
# Order of script execution
order:
  - designer-interface.tcl
  - setup-session.tcl
  - read-design.tcl
  - report-timing.tcl
  - report-power.tcl
|
<reponame>niwis/snitch
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
# Bender package manifest for the SPM interface IP
package:
  name: spm_interface
  authors:
    - <NAME> <<EMAIL>>
dependencies:
  common_cells: {path: ../../vendor/pulp_platform_common_cells}
export_include_dirs:
  - include
sources:
  - src/spm_interface.sv
  - src/spm_rmw_adapter.sv
  - target: simulation
    files:
      - src/spm_test.sv
  - target: test
    files:
      # Level 0
      - test/tb_spm_rmw_adapter.sv
|
<gh_stars>0
# Source-file manifest for the AFE readout subsystem
afe_readout_subsystem:
  vlog_opts: [
    "-L vivopulp_lib"
  ]
  files: [
    rtl/afe_reg_if.sv,
    rtl/afe_ro_arbiter.sv,
    rtl/afe_ro_conf_if.sv,
    rtl/afe_sync_if.sv,
    rtl/afe_buff_addrgen.sv,
    rtl/afe_l2_addrgen.sv,
    rtl/afe_top.sv,
    rtl/afe_ro_udma_if.sv,
    rtl/afe_ro_sram_buffer.sv,
    rtl/afe_readout_subsystem.sv,
  ]
|
<gh_stars>0
### YamlMime:ManagedReference
items:
- uid: alps_.net_api.abstracPass
commentId: T:alps_.net_api.abstracPass
id: abstracPass
parent: alps_.net_api
children: []
langs:
- csharp
- vb
name: abstracPass
nameWithType: abstracPass
fullName: alps_.net_api.abstracPass
type: Interface
source:
remote:
path: alps .net api/alps .net api/OwlGraph.cs
branch: master
repo: http://imi-dev.imi.kit.edu:443/ukerz/alps-.net-api.git
id: abstracPass
path: OwlGraph.cs
startLine: 4331
assemblies:
- alps.net_api
namespace: alps_.net_api
summary: "\nhier sollte die Beschreibung der AbstractPass kommen, mal schauen was ich davon\nbrauche, vermutlich alles\n"
example: []
syntax:
content: public interface abstracPass
content.vb: Public Interface abstracPass
modifiers.csharp:
- public
- interface
modifiers.vb:
- Public
- Interface
references:
- uid: alps_.net_api
commentId: N:alps_.net_api
name: alps_.net_api
nameWithType: alps_.net_api
fullName: alps_.net_api
|
<gh_stars>1000+
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: <NAME>
# Human readable task name
name: empty ls
# Long form description.
description: |+
Check out https://www.zone443.dev , a community of sites with an extremely secure shared authentication mechanism.
P.S., The admin has a site running at https://admin.zone443.dev .
Note: Please don't request many subdomains. You don't need many subdomains to solve this challenge. Don't create a ton of subdomains in an attempt to exploit the subdomain creation process; there's no bug there that you should exploit. If a ton of subdomains get created, we may have to delete some old ones.
Note: There is a very high chance other teams will find your subdomain(s).
Note: If you have problems with https:// cert quotas, try an alternate CA, such as one from these lists:
https://www.xf.is/2020/06/30/list-of-free-acme-ssl-providers/
https://github.com/acmesh-official/acme.sh#supported-ca .
Update: The admin now has a slightly different browser that properly supports Buypass certificates.
# The flag
flag: CTF{m0_ambient_auth_m0_pr0blems}
# Task category. (one of hw, crypto, pwn, rev, web, net, misc)
category: web
# === the fields below will be filled by SRE or automation ===
# Task label
label: ''
# URL for web challenges
link: ''
# host/port for non-web challenges. This is a web challenge though, whatever.
host: |-
https://www.zone443.dev
https://admin.zone443.dev
# Special: we want to validate the hosts as http
links_for_testing:
- 'https://www.zone443.dev'
- 'https://admin.zone443.dev'
hosts_for_testing: []
# the URL for attachments, to be filled in by automation
attachment: ''
# is this challenge released? Will be set by SREs
visible: true
|
# ruckus release configuration: firmware images per build target
GitBase: ..
Targets:
  SmurfKcu1500RssiOffload10GbE:
    ImageDir: targets/SmurfKcu1500RssiOffload10GbE/images
    Extensions:
      - mcs
  Kcu105TenGigE:
    ImageDir: targets/Kcu105TenGigE/images
    Extensions:
      - bit
Releases:
  all:
    # canonical lowercase boolean (YAML 1.2)
    Primary: true
    Targets:
      - SmurfKcu1500RssiOffload10GbE
      - Kcu105TenGigE
    Types:
      - Firmware-Only
  SmurfKcu1500RssiOffload10GbE:
    Targets:
      - SmurfKcu1500RssiOffload10GbE
    Types:
      - Firmware-Only
  Kcu105TenGigE:
    Targets:
      - Kcu105TenGigE
    Types:
      - Firmware-Only
|
<reponame>ultrazohm/ultrazohm_sw
image:
  file: .devcontainer/Dockerfile
# List the ports you want to expose and what to do when they are served. See https://www.gitpod.io/docs/config-ports/
ports:
  - port: 3000
    onOpen: open-preview
# List the start up tasks. You can start them in parallel in multiple terminals. See https://www.gitpod.io/docs/config-start-tasks/
tasks:
  - name: Docs dependencies
    init: pip install -r docs/requirements.txt
vscode:
  extensions:
    - eamodio.gitlens
    - james-yu.latex-workshop
    - lextudio.restructuredtext
    - streetsidesoftware.code-spell-checker
    - dracula-theme.theme-dracula
    - pkief.material-icon-theme
    - atlassian.atlascode
|
#Descriptions:
#CMD: cpld command
#A: Assistant Board
#T: Target Board
#FUNC0: the function of this pin connection
#DIRECTION: A2T T2A T2T
#FRDM A-B-C-D DEFINITION
#| NAME | SIZE | CPLD_SCH | FRDM_SCH
#| A | (10*2) | JJ_2 | ?
#| B | (8*2) | JJ_1 | ?
#| C | (5*2) | JJ_3 | ?
#| D | (8*2) | JJ_4 | ?
#SINGLE: default 0, if the pin header is single on FRDM-Board, this should be set to 1
SINGLE: 0
BUTTON_RST8:
IP: __BUTTON_RST
CMD: BDI
ALT: b
button_out_status:
PIN: CPLD_IO54
DIRECTION: OUT
BUTTON_RST7:
IP: __BUTTON_RST
CMD: BDH
ALT: b
button_out_status:
PIN: CPLD_IO53
DIRECTION: OUT
BUTTON_RST6:
IP: __BUTTON_RST
CMD: BDG
ALT: b
button_out_status:
PIN: CPLD_IO52
DIRECTION: OUT
BUTTON_RST5:
IP: __BUTTON_RST
CMD: BDF
ALT: b
button_out_status:
PIN: CPLD_IO14
DIRECTION: OUT
BUTTON_RST4:
IP: __BUTTON_RST
CMD: BDE
ALT: b
button_out_status:
PIN: CPLD_IO5
DIRECTION: OUT
BUTTON_RST3:
IP: __BUTTON_RST
CMD: BDD
ALT: b
button_out_status:
PIN: CPLD_IO4
DIRECTION: OUT
BUTTON_RST2:
IP: __BUTTON_RST
CMD: BDC
ALT: b
button_out_status:
PIN: CPLD_IO3
DIRECTION: OUT
BUTTON_RST1:
IP: __BUTTON_RST
CMD: BDB
ALT: b
button_out_status:
PIN: CPLD_IO2
DIRECTION: OUT
|
<filename>ikondemo/actions/datagen_start_NMX_overlap.yml<gh_stars>0
---
- hosts: data-generators
gather_facts: False
tasks:
- name: start gem data
command: "{{daemonize_cmd}} {{script_path}}/datagen_NMX_overlap.bash 1000"
tags:
- gdgem
|
# Technology Setup is done in hammer-tstech28-plugin/bwrc.yml.
vlsi.core.max_threads: 24
# File inputs
synthesis.inputs:
input_files: [
"src/SVM_SRAM/SVM.sv",
"src/SVM_SRAM/multiply_quantize.sv",
"src/SVM_SRAM/SVM_memories_214.sv"
]
top_module: "SVM"
# General Hammer Inputs
vlsi.inputs.sram_parameters: "src/sram_inputs.json"
vlsi.inputs.sram_parameters_meta: ["transclude", "json2list"]
# Hammer will auto-generate a CPF for simple power designs; see hammer/src/hammer-vlsi/defaults.yml for more info
vlsi.inputs.power_spec_mode: "auto"
vlsi.inputs.power_spec_type: "cpf"
# Specify clock signals
vlsi.inputs.clocks: [
{name: "clk", period: "454ns", uncertainty: "0.1ns"}
]
vlsi.inputs.custom_sdc_constraints: [
"set_input_delay -clock clk 0 [all_inputs]",
"set_output_delay -clock clk 0 [all_outputs]"
]
# Generate Make include to aid in flow
vlsi.core.build_system: make
# Power Straps
par.power_straps_mode: generate
par.generate_power_straps_method: by_tracks
par.blockage_spacing: 2.0
par.generate_power_straps_options:
by_tracks:
strap_layers:
- M3
- M4
- M5
- M6
- M7
- M8
- M9
pin_layers:
- M9
track_width: 7 # minimum allowed for M2 & M3
track_spacing: 0
track_spacing_M3: 1 # to avoid M2 shorts at higher density
track_start: 10
power_utilization: 0.05
power_utilization_M8: 1.0
power_utilization_M9: 1.0
# Placement Constraints
vlsi.inputs.placement_constraints:
- path: "SVM"
type: toplevel
x: 0
y: 0
width: 2000
height: 1500
margins:
left: 0
right: 0
top: 0
bottom: 0
- path: "SVM/mem/VALPHA_SRAM"
type: hardmacro
x: 200
y: 60
master: "TS1N28HPMFHVTB128X9M4SW"
- path: "SVM/mem/AALPHA_SRAM"
type: hardmacro
x: 300
y: 60
master: "TS1N28HPMFHVTB160X9M4SW"
- path: "SVM/mem/VSUPPORT_SRAM0"
type: hardmacro
x: 400
y: 60
master: "TS1N28HPMFHVTB224X144M4SW"
- path: "SVM/mem/VSUPPORT_SRAM1"
type: hardmacro
x: 500
y: 60
master: "TS1N28HPMFHVTB224X144M4SW"
- path: "SVM/mem/VSUPPORT_SRAM2"
type: hardmacro
x: 600
y: 60
master: "TS1N28HPMFHVTB224X144M4SW"
- path: "SVM/mem/VSUPPORT_SRAM3"
type: hardmacro
x: 700
y: 60
master: "TS1N28HPMFHVTB224X144M4SW"
- path: "SVM/mem/VSUPPORT_SRAM4"
type: hardmacro
x: 800
y: 60
master: "TS1N28HPMFHVTB224X144M4SW"
- path: "SVM/mem/VSUPPORT_SRAM5"
type: hardmacro
x: 900
y: 60
master: "TS1N28HPMFHVTB224X144M4SW"
- path: "SVM/mem/VSUPPORT_SRAM6"
type: hardmacro
x: 1000
y: 60
master: "TS1N28HPMFHVTB224X144M4SW"
- path: "SVM/mem/VSUPPORT_SRAM7"
type: hardmacro
x: 1100
y: 60
master: "TS1N28HPMFHVTB224X72M4SW"
- path: "SVM/mem/ASUPPORT_SRAM0"
type: hardmacro
x: 200
y: 700
master: "TS1N28HPMFHVTB224X144M4SW"
- path: "SVM/mem/ASUPPORT_SRAM1"
type: hardmacro
x: 300
y: 700
master: "TS1N28HPMFHVTB224X144M4SW"
- path: "SVM/mem/ASUPPORT_SRAM2"
type: hardmacro
x: 400
y: 700
master: "TS1N28HPMFHVTB224X144M4SW"
- path: "SVM/mem/ASUPPORT_SRAM3"
type: hardmacro
x: 500
y: 700
master: "TS1N28HPMFHVTB224X144M4SW"
- path: "SVM/mem/ASUPPORT_SRAM4"
type: hardmacro
x: 600
y: 700
master: "TS1N28HPMFHVTB224X144M4SW"
- path: "SVM/mem/ASUPPORT_SRAM5"
type: hardmacro
x: 700
y: 700
master: "TS1N28HPMFHVTB224X144M4SW"
- path: "SVM/mem/ASUPPORT_SRAM6"
type: hardmacro
x: 800
y: 700
master: "TS1N28HPMFHVTB224X144M4SW"
- path: "SVM/mem/ASUPPORT_SRAM7"
type: hardmacro
x: 900
y: 700
master: "TS1N28HPMFHVTB224X144M4SW"
- path: "SVM/mem/ASUPPORT_SRAM8"
type: hardmacro
x: 1000
y: 700
master: "TS1N28HPMFHVTB224X144M4SW"
- path: "SVM/mem/ASUPPORT_SRAM9"
type: hardmacro
x: 1100
y: 700
master: "TS1N28HPMFHVTB224X99M4SW"
- path: "SVM/place_obs_bottom"
type: obstruction
obs_types: ["place"]
x: 0
y: 0
width: 2000
height: 1.08 # 1 core site tall, necessary to avoid shorts
# VDD supply constraints
vlsi.inputs.supplies:
VDD: "0.90 V"
GND: "0 V"
vlsi.inputs.mmmc_corners: [
{
"name": "ss0p81v125c",
"type": "setup",
"voltage": "0.81 V",
"temp": "125 C"
},
{
"name": "ff0p99v0c",
"type": "hold",
"voltage": "0.99 V",
"temp": "0 C"
},
{
"name": "tt0p9v25c",
"type": "extra",
"voltage": "0.90 V",
"temp": "25 C"
}
]
# Pin placement constraints
vlsi.inputs.pin_mode: generated
vlsi.inputs.pin.generate_mode: semi_auto
vlsi.inputs.pin.assignments: [
{pins: "*", layers: ["M5", "M7"], side: "bottom"}
]
# SRAM compiler options
vlsi.core.sram_generator_tool: "sram_compiler"
vlsi.core.sram_generator_tool_path: ["hammer-tstech28-plugin"]
vlsi.core.sram_generator_tool_path_meta: "append"
|
package:
name: hypnos
authors:
- "<NAME> <<EMAIL>>"
dependencies:
tech_cells_generic: {git: "<EMAIL>:pulp-platform/tech_cells_generic.git", version: 0.2.3}
common_cells: { git: "<EMAIL>:pulp-platform/common_cells.git", version: 1.13.1 }
udma_core: { git: "https://github.com/pulp-platform/udma_core.git", version: 1.0.2 }
export_include_dirs:
- src/spi_module
sources:
- src/common/pkg_common.sv
- src/hd_accelerator/hd_encoder/mixer/pkg_mixer_permutate.sv
- src/hd_accelerator/hd_encoder/mixer/mixer.sv
- src/hd_accelerator/hd_encoder/man_module/unary_encoder/unary_encoder.sv
- src/hd_accelerator/hd_encoder/man_module/pkg_perm_final.sv
- src/hd_accelerator/hd_encoder/man_module/man_module.sv
- src/hd_accelerator/hd_encoder/hd_unit/pkg_hd_unit.sv
- src/hd_accelerator/hd_encoder/hd_unit/hd_unit.sv
- src/hd_accelerator/hd_encoder/pkg_hd_encoder.sv
- src/hd_accelerator/hd_encoder/hd_encoder.sv
- src/hd_accelerator/hd_memory/pkg_hd_memory.sv
- src/hd_accelerator/hd_memory/hd_memory.sv
- src/hd_accelerator/ucode_sequencer/config_unit/pkg_memory_mapping.sv
- src/hd_accelerator/ucode_sequencer/ucode_decoder/pkg_ucode_decoder.sv
- src/hd_accelerator/ucode_sequencer/pkg_ucode_sequencer.sv
- src/hd_accelerator/ucode_sequencer/config_unit/config_unit.sv
- src/hd_accelerator/ucode_sequencer/pc_hw_loop_mod/pc_hwl_mod.sv
- src/hd_accelerator/ucode_sequencer/ucode_decoder/ucode_decoder.sv
- src/common/scm_1rw.sv
- src/hd_accelerator/ucode_sequencer/ucode_sequencer.sv
- src/hd_accelerator/shared_memory_interface/shared_memory_interface.sv
- src/hd_accelerator/hd_accelerator.sv
- src/cfg_iface_adapter/cfg_iface.sv
- src/spi_module/wakeupspi_spim_ctrl.sv
- src/spi_module/wakeupspi_spim_txrx.sv
- src/spi_module/spi_top.sv
- src/preprocessor/lbp.sv
- src/preprocessor/preprocessor_top.sv
- src/hypnos.sv
|
<filename>fpga/modules/address/config.yml
---
name: address
board: boards/red-pitaya
cores:
- fpga/cores/comparator_v1_0
- fpga/cores/averager_counter_v1_0
- fpga/cores/address_generator_v1_0
- fpga/cores/edge_detector_v1_0
control_registers:
- addr # reset[0:0], trig_acq[1:1], delay_tvalid[6:2]
- dac_period # period on which the dac is played
status_registers:
- avg_ready
- avg_on_out
parameters:
bram_addr_width: 8
adc_width: 14
n_dac: 2
|
<reponame>Calculasians/HDC-Sensor-Fusion-Research
power.inputs.waveforms_meta: "append"
power.inputs.waveforms:
- "/tools/B/daniels/hammer-tsmc28/build/sim-par-rundir/hdc_sensor_fusion.vcd"
power.inputs.database: "/tools/B/daniels/hammer-tsmc28/build/par-rundir/latest"
power.inputs.tb_name: "hdc_sensor_fusion_tb"
power.inputs.saifs_meta: "append"
power.inputs.saifs:
- "/tools/B/daniels/hammer-tsmc28/build/sim-par-rundir/hdc_sensor_fusion.saif"
power.inputs.start_times: ["0"]
power.inputs.end_times: ["19998720"]
|
<reponame>usmnzain/serv
name: build-openlane-sky130
on: [push]
jobs:
build-openlane:
runs-on: ubuntu-latest
steps:
- name: Checkout subservient
uses: actions/checkout@v2
- name: Build with Openlane
uses: librecores/ci-fusesoc-action@migrate-dockerized
with:
core: serv
target: sky130
tool: openlane
- name: Store artifacts
uses: actions/upload-artifact@v2
with:
name: serv.gds
path: /home/runner/work/serv/serv/build/serv_1.1.0/sky130-openlane/gds/serv_synth_wrapper.gds
|
title: RFSoC 2x2
logo: /assets/img/logo.png
description: RFSoC 2x2 development board
show_downloads: true
google_analytics: UA-190351354-1
theme: jekyll-theme-dinky
|
image:
file: .gitpod.Dockerfile
tasks:
- before: >
sudo apt update ;
sudo apt install iverilog ;
pip3 install cocotb ;
export COCOTB_REDUCED_LOG_FMT=1 ;
git submodule update --init ;
cargo install rstbrun
init: >
cd /workspace/rstb_examples/test_axis_fifo &&
cargo build --release
command: >
cd /workspace/rstb_examples/test_axis_fifo &&
gp open hdl/axis_fifo.v &&
gp open src/tb.rs &&
gp open src/lib.rs ;
echo "build & run Rstb test 'rstbrun .'" ;
echo "run identical Cocotb test with 'make'"
# https://www.gitpod.io/docs/config-ports/
ports:
- port: 6080 # VNC for e.g. gtkwave
onOpen: notify
- port: 5900
onOpen: ignore
vscode:
extensions:
- [email protected]:RPslnvyzniF7C66mxHT+Hg==
- [email protected]:fJXQenGkzoZwUN/RddpuSw==
- matklad.rust-analyzer
- mhutchie.git-graph
|
<filename>appveyor.yml
build: false # Tell appveyor to not use msbuild
environment:
matrix:
    - PYTHON: "2.7"
platform:
- x86
- x64
init:
- "ECHO %PYTHON%"
- ps: Start-FileDownload 'http://download.microsoft.com/download/7/9/6/796EF2E4-801B-4FC4-AB28-B59FBF6D907B/VCForPython27.msi' C:\VCForPython27.msi; echo "Done"
- cmd: msiexec /i C:\VCForPython27.msi /quiet /qn
install:
# Miniconda setup + necessary external packages
- set PATH=C:\Miniconda;C:\Miniconda\Scripts;%PATH% # miniconda is already installed on appveyor: https://github.com/appveyor/ci/issues/359
- conda update conda --yes
- conda update --all --yes
- conda info -a
- conda install --yes bitarray cython ipython matplotlib mock nose numba numpy pyqt pyserial pytables pyyaml pyzmq qtpy scipy
- pip install progressbar-latest pyvisa pyvisa-py git+https://github.com/pyqtgraph/[email protected] #git+https://github.com/uvemas/ViTables@develop
# test packages required for USB interface
- pip install pyusb
- pip install pySiLibUSB
# pyBAR installation including additional specific packages
- pip install "basil_daq>=2.4.12,<3.0.0"
#- pip install git+https://github.com/SiLab-Bonn/basil@development
- pip install git+https://github.com/SiLab-Bonn/pyBAR_fei4_interpreter@development
- pip install "pixel_clusterizer>=3.1,<3.2"
- python.exe setup.py develop
- conda list
- pip list
test_script:
- cd pybar/testing
- nosetests test_analysis.py
|
sudo: required
language: ruby
services:
- docker
before_install:
- echo "Testing Docker Hub credentials"
- docker login -e=$DOCKER_EMAIL -u=$DOCKER_USERNAME -p=$DOCKER_PASSWORD
- echo "Docker Hub credentials are working"
- docker build -t build-springxd-base .
script:
- docker ps -a
after_success:
- echo "Test Success - Branch($TRAVIS_BRANCH) Pull Request($TRAVIS_PULL_REQUEST) Tag($TRAVIS_TAG)"
- if [[ "$TRAVIS_BRANCH" == "master" ]]; then echo -e "Push Container to Docker Hub"; fi
- docker login -e=$DOCKER_EMAIL -u=$DOCKER_USERNAME -p=$DOCKER_PASSWORD
- export REPO=jayjohnson/springxd-base
- export TAG=`if [ "$TRAVIS_BRANCH" == "master" ]; then echo "latest"; else echo $TRAVIS_BRANCH ; fi`
- docker build -f Dockerfile -t $REPO:$COMMIT .
- docker tag $REPO:$COMMIT $REPO:$TAG
- docker tag $REPO:$COMMIT $REPO:travis-$TRAVIS_BUILD_NUMBER
- docker push $REPO
|
<filename>infrastructure/kctf/base/challenge-skeleton/healthcheck/secrets/kustomization.yaml<gh_stars>0
secretGenerator:
- name: challenge-skeleton-healthcheck-secrets
files:
generatorOptions:
disableNameSuffixHash: true
labels:
type: generated
annotations:
note: generated
|
---
name: adc-dac
board: boards/red-pitaya
cores:
- fpga/cores/axi_ctl_register_v1_0
- fpga/cores/axi_sts_register_v1_0
- fpga/cores/dna_reader_v1_0
- fpga/cores/redp_adc_v1_0
- fpga/cores/redp_dac_v1_0
memory:
- name: control
offset: '0x60000000'
range: 4K
- name: status
offset: '0x50000000'
range: 4K
control_registers:
- led
- dac[n_dac]
status_registers:
- adc[n_adc]
parameters:
fclk0: 200000000
dac_width: 14
adc_width: 14
n_dac: 2
n_adc: 2
xdc:
- boards/red-pitaya/config/ports.xdc
- boards/red-pitaya/config/clocks.xdc
drivers:
- server/drivers/common.hpp
- ./adc_dac.hpp
web:
- web/index.html
- web/main.css
- web/koheron.ts
|
<filename>Bender.yml
package:
name: redundancy_cells
authors:
- "<NAME> <<EMAIL>>"
dependencies:
tech_cells_generic: { git: "https://github.com/pulp-platform/tech_cells_generic.git", version: 0.2.3 }
axi: { git: "https://github.com/pulp-platform/axi.git", version: 0.29.1 }
hci: { git: "https://github.com/pulp-platform/hci.git", version: 1.0.8 }
common_verification: { git: "https://github.com/pulp-platform/common_verification.git", version: 0.2.0 }
register_interface: { git: "https://github.com/pulp-platform/register_interface.git", version: 0.3.1 }
common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.24.0 }
sources:
# Source files grouped in levels. Files in level 0 have no dependencies on files in this
# package. Files in level 1 only depend on files in level 0, files in level 2 on files in
# levels 1 and 0, etc. Files within a level are ordered alphabetically.
# Level 0
- rtl/cTCLS_unit/ctcls_manager_reg_pkg.sv
- rtl/lowrisc_ecc/prim_secded_13_8_dec.sv
- rtl/lowrisc_ecc/prim_secded_13_8_enc.sv
- rtl/lowrisc_ecc/prim_secded_22_16_dec.sv
- rtl/lowrisc_ecc/prim_secded_22_16_enc.sv
- rtl/lowrisc_ecc/prim_secded_39_32_dec.sv
- rtl/lowrisc_ecc/prim_secded_39_32_enc.sv
- rtl/lowrisc_ecc/prim_secded_72_64_dec.sv
- rtl/lowrisc_ecc/prim_secded_72_64_enc.sv
- rtl/lowrisc_ecc/prim_secded_pkg.sv
- rtl/TMR_voter.sv
- rtl/TMR_word_voter.sv
# Level 1
- rtl/cTCLS_unit/ctcls_manager_reg_top.sv
- rtl/ecc_concat_32_64.sv
- rtl/ecc_scrubber.sv
- rtl/ecc_sram_wrap.sv
- rtl/BUS_enc_dec/AXI_bus_ecc_dec.sv
- rtl/BUS_enc_dec/AXI_bus_ecc_enc.sv
- rtl/BUS_enc_dec/hci_core_intf_ecc_dec.sv
- rtl/BUS_enc_dec/hci_core_intf_ecc_enc.sv
- rtl/BUS_enc_dec/hci_mem_intf_ecc_dec.sv
- rtl/BUS_enc_dec/hci_mem_intf_ecc_enc.sv
- rtl/BUS_enc_dec/PE_XBAR_bus_ecc_dec.sv
- rtl/BUS_enc_dec/PE_XBAR_bus_ecc_enc.sv
- rtl/BUS_enc_dec/TCDM_XBAR_bus_ecc_dec.sv
- rtl/BUS_enc_dec/TCDM_XBAR_bus_ecc_enc.sv
- rtl/BUS_enc_dec/XBAR_DEMUX_BUS_ecc_dec.sv
- rtl/BUS_enc_dec/XBAR_DEMUX_BUS_ecc_enc.sv
- rtl/TMR_voter_detect.sv
# Level 2
- include_dirs:
- rtl/cTCLS_unit
files:
- rtl/cTCLS_unit/cTCLS_unit.sv
- rtl/bitwise_TMR_voter.sv
- target: test
files:
- test/tb_ecc_scrubber.sv
- test/tb_ecc_secded.sv
- test/tb_ecc_sram.sv
- test/tb_tmr_voter.sv
- test/tb_tmr_voter_detect.sv
- test/tb_tmr_word_voter.sv
- test/tb_bitwise_tmr_voter.sv
|
<reponame>StanfordAHA/garnet
name: Tile_PE
commands:
- bash get_Tile_PE_outputs.sh
inputs:
- design.v
outputs:
- Tile_PE_tt.lib
- Tile_PE_tt.db
- Tile_PE.lef
- Tile_PE.gds
- Tile_PE.vcs.v
- Tile_PE.vcs.pg.v
- Tile_PE.pt.sdc
- Tile_PE.spef.gz
- Tile_PE.lvs.v
- Tile_PE.sdf
postconditions:
- assert File( 'outputs/Tile_PE_tt.lib' ) # must exist
- assert File( 'outputs/Tile_PE_tt.db' ) # must exist
- assert File( 'outputs/Tile_PE.lef' ) # must exist
- assert File( 'outputs/Tile_PE.gds' ) # must exist
- assert File( 'outputs/Tile_PE.vcs.v' ) # must exist
- assert File( 'outputs/Tile_PE.vcs.pg.v' ) # must exist
- assert File( 'outputs/Tile_PE.pt.sdc' ) # must exist
- assert File( 'outputs/Tile_PE.spef.gz' ) # must exist
- assert File( 'outputs/Tile_PE.lvs.v' ) # must exist
- assert File( 'outputs/Tile_PE.sdf' ) # must exist
|
# .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
version: 2
# Use conda because default Python is not compiled with --enable-shared
conda:
environment: documentation/conda.yml
python:
install:
- method: pip
path: .
extra_requirements:
- bus
sphinx:
configuration: documentation/source/conf.py
|
# Copyright 2020 ETH Zurich and University of Bologna.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
package:
name: snitch_ipu
authors:
- <NAME> <<EMAIL>>
- <NAME> <<EMAIL>>
dependencies:
common_cells: {path: ../../vendor/pulp_platform_common_cells}
snitch: {path: ../../ip/snitch}
sources:
# Level 0:
- src/snitch_ipu_pkg.sv
# Level 1:
- src/snitch_ipu_alu.sv
# Level 2:
- src/snitch_int_ss.sv
|
<reponame>sgherbst/svreal
steps:
- command: |
# set up environment
source /cad/modules/tcl/init/bash
module load base xcelium/19.03.003 vcs
/usr/local/miniconda/bin/python3.7 -m venv venv
source venv/bin/activate
printenv
# upgrade pip
python -m pip install --upgrade pip
# run regression script
source regress.sh
label: "test"
timeout_in_minutes: 60
agents:
fault2: "true"
- command: |
# set up environment
source /etc/environment
python3.7 -m venv venv
source venv/bin/activate
printenv
# upgrade pip
python -m pip install --upgrade pip
# run regression script
source regress.sh
label: "test_emu"
timeout_in_minutes: 60
agents:
fpga_verif: "true"
|
apiVersion: v1
kind: Service
metadata:
name: sourcerer-app
labels:
app: sourcerer-app
spec:
type: ClusterIP
ports:
- port: 80
targetPort: 80
selector:
app: sourcerer-app
---
apiVersion: apps/v1beta1
kind: Deployment
metadata:
name: sourcerer-app
spec:
replicas: $REPLICAS
template:
metadata:
labels:
app: sourcerer-app
spec:
containers:
- name: sourcerer-app
image: gcr.io/sourcerer-1377/sourcerer-app:$CONTAINER_TAG
imagePullPolicy: Always
resources:
requests:
cpu: 100m
memory: 100Mi
|
<reponame>een5afr-public/ariane<filename>docs/_data/docs.yml
- title: Introduction
docs:
- home
- documentation
- title: Architecture
docs:
- pcgen_stage
- if_stage
- id_stage
- issue_stage
- ex_stage
- commit_stage
|
variables:
VSIM: vsim-10.7e
VLOG: vlog-10.7e
before_script:
- export PATH="~balasr/.local/bin":$PATH
stages:
- build
- test
build:
stage: build
script:
- cd testbench
- make build
test_pulp_tap:
stage: test
script:
- cd testbench
- make build run
|
variables:
build_num: $(Build.BuildNumber)
jobs:
- job: PyPi_Linux
pool:
vmImage: 'ubuntu-18.04'
steps:
- bash: echo "##vso[task.prependpath]$CONDA/bin"
displayName: Add conda to PATH
- script: |
sudo apt-get install zlib1g-dev
echo "Which Conda: `which conda`"
conda create --yes --name BuildEnv
source activate BuildEnv
echo "Conda: `which conda`"
echo "Conda version: `conda --version`"
python -m pip install wheel twine ivpm
displayName: 'Install wheel+twine+ivpm'
- script: |
source activate BuildEnv
conda install -y -c timvideos --name BuildEnv iverilog verilator
conda install -y -c anaconda --name BuildEnv zlib
echo "verilator: `which verilator`"
echo "iverilog: `which iverilog`"
displayName: 'Setup Test Tools'
# - script: |
# source activate BuildEnv
# ./scripts/ivpm.py update
# source etc/rv_bfm_env.sh
# which_conda=`which conda`
# conda_bindir=`dirname $which_conda`
# conda_dir=`dirname $conda_bindir`
# export LD_LIBRARY_PATH=$conda_dir/lib:$LD_LIBRARY_PATH
# cd ve/rv_out/sim
# echo "** Running Icarus Verilog Test"
# runtest.pl -test tests/rv_out_smoke_test.f -sim ivl
# echo "** Running Verilator Test"
# runtest.pl -test tests/rv_out_smoke_test.f -sim vlsim +tool.vlsim.tracevcd
# displayName: 'Run Tests'
- script: |
python setup.py bdist_wheel --universal
displayName: 'Build Wheel'
- task: TwineAuthenticate@1
condition: eq(variables['Build.SourceBranchName'], 'master')
inputs:
pythonUploadServiceConnection: pypi-rv_bfms
- script: |
# Only deploy from master
if test "$(Build.SourceBranchName)" = "master"; then
python -m twine --version
echo "Calling twine"
python -m twine upload -r pybfms_rv_bfms --config-file $(PYPIRC_PATH) dist/*.whl
echo "Calling twine complete"
fi
displayName: 'Upload to PyPi'
|
<reponame>dosadih/svut<gh_stars>10-100
name: macOS
on: [push]
jobs:
lint-code:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- run: brew install pylint
- run: cd test; ./lint.sh
ci-tests:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- run: brew install icarus-verilog
- run: iverilog -V
- run: brew install verilator
- run: verilator -V
- run: cd test; ./regression.sh
|
pull_request_rules:
- name: remove outdated reviews
conditions:
- base=master
actions:
dismiss_reviews:
pull_request_rules:
- name: automatic squash-and-merge on CI success and review
conditions:
- status-success=continuous-integration/travis-ci/pr
- "#approved-reviews-by>=1"
- "#changes-requested-reviews-by=0"
- base=master
- label="Please Merge"
- label!="DO NOT MERGE"
actions:
merge:
method: squash
strict: smart
strict_method: merge
|
<reponame>rits-drsl/ZybotR2-96-fpt19
%YAML 1.2
---
conn00:
dst_path_index: 9
dst_path_number: 1
src_path_index: 40
src_path_number: 2
conn01:
dst_path_index: 100
dst_path_number: 3
src_path_index: 67
src_path_number: 1
conn02:
dst_path_index: 94
dst_path_number: 4
src_path_index: 40
src_path_number: 3
conn03:
dst_path_index: 100
dst_path_number: 2
src_path_index: 34
src_path_number: 4
conn04:
dst_path_index: 40
dst_path_number: 2
src_path_index: 150
src_path_number: 0
conn05:
dst_path_index: 34
dst_path_number: 4
src_path_index: 224
src_path_number: 0
conn06:
dst_path_index: 40
dst_path_number: 3
src_path_index: 3
src_path_number: 0
conn07:
dst_path_index: 67
dst_path_number: 1
src_path_index: 77
src_path_number: 0
conn08:
dst_path_index: 70
dst_path_number: 0
src_path_index: 100
src_path_number: 3
conn09:
dst_path_index: 144
dst_path_number: 0
src_path_index: 9
src_path_number: 1
conn10:
dst_path_index: 217
dst_path_number: 0
src_path_index: 100
src_path_number: 2
conn11:
dst_path_index: 291
dst_path_number: 0
src_path_index: 94
src_path_number: 4
conn12:
dst_path_index: 77
dst_path_number: 2
src_path_index: 90
src_path_number: 1
conn13:
dst_path_index: 57
dst_path_number: 4
src_path_index: 77
src_path_number: 2
conn14:
dst_path_index: 77
dst_path_number: 3
src_path_index: 57
src_path_number: 4
conn15:
dst_path_index: 90
dst_path_number: 1
src_path_index: 77
src_path_number: 3
num: 16
|
# Adapted from Garnet and ButterPHY
name: prelvs_fix
commands:
- |
mkdir -p outputs
python fix_adbg_intf_i.py inputs/design.lvs.v
python fix_term_vss.py inputs/design_alt.lvs.v
mv inputs/design_alt_alt.lvs.v outputs/design.lvs.v
inputs:
- design.lvs.v
outputs:
- design.lvs.v
|
package:
name: tech_cells_generic
description: "Technology-agnostic building blocks."
sources:
- target: all(fpga, xilinx)
files:
- src/deprecated/cluster_clk_cells_xilinx.sv
- src/fpga/tc_clk_xilinx.sv
- target: not(all(fpga, xilinx))
files:
- src/deprecated/cluster_clk_cells.sv
- src/deprecated/pulp_clk_cells.sv
- src/rtl/tc_clk.sv
- target: not(synthesis)
files:
- src/deprecated/cluster_pwr_cells.sv
- src/deprecated/generic_memory.sv
- src/deprecated/generic_rom.sv
- src/deprecated/pad_functional.sv
- src/deprecated/pulp_buffer.sv
- src/deprecated/pulp_pwr_cells.sv
- src/tc_pwr.sv
|
<gh_stars>0
name: Assets
on:
release:
types:
- created
pull_request:
env:
CMAKE_VERSION: '3.20.0'
jobs:
archive:
name: Linux
runs-on: ubuntu-latest
container: centos:7
strategy:
matrix:
sim:
- ON
- OFF
steps:
- name: Install dependencies
run: |
yum install -y epel-release
yum install -y curl make gcc gcc-c++ rpm-build git
- name: Install CMake
run: curl -L https://github.com/Kitware/CMake/releases/download/v$CMAKE_VERSION/cmake-$CMAKE_VERSION-Linux-x86_64.tar.gz | tar xz --strip-components=1 -C /usr
- uses: actions/checkout@v2
- name: Configure
run: cmake . -DCMAKE_BUILD_TYPE=Release -DBUILD_FLETCHER_AWS-SIM=${{ matrix.sim }}
- name: Package
run: make -j package
- id: tarball
run: echo "##[set-output name=name;]$(ls fletch*.tar.gz)"
- name: Install
run: tar xvfz ./${{ steps.tarball.outputs.name }} -C /usr
- name: Upload tarball
uses: actions/upload-release-asset@v1
if: ${{ github.event_name == 'release' && github.event.action == 'created' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ github.event.release.upload_url }}
asset_path: ${{ steps.tarball.outputs.name }}
asset_name: ${{ steps.tarball.outputs.name }}
asset_content_type: application/octet-stream
centos:
name: CentOS
runs-on: ubuntu-latest
strategy:
matrix:
version:
- 7
- 8
sim:
- ON
- OFF
container: centos:${{ matrix.version }}
steps:
- uses: actions/checkout@v2
- name: Install dependencies
run: |
yum install -y epel-release
yum install -y curl make rpm-build gcc gcc-c++ git
- name: Install CMake
run: curl -L https://github.com/Kitware/CMake/releases/download/v$CMAKE_VERSION/cmake-$CMAKE_VERSION-Linux-x86_64.tar.gz | tar xz --strip-components=1 -C /usr
- name: Configure
run: cmake . -DCMAKE_BUILD_TYPE=Release -DBUILD_FLETCHER_AWS-SIM=${{ matrix.sim }}
- name: Package
run: make -j package
- id: rpm
run: echo "##[set-output name=name;]$(ls fletch*.rpm)"
- name: Upload rpm
uses: actions/upload-release-asset@v1
if: ${{ github.event_name == 'release' && github.event.action == 'created' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ github.event.release.upload_url }}
asset_path: ${{ steps.rpm.outputs.name }}
asset_name: ${{ steps.rpm.outputs.name }}
asset_content_type: application/octet-stream
ubuntu:
name: Ubuntu
strategy:
matrix:
version:
- 18.04
- 20.04
sim:
- ON
- OFF
runs-on: ubuntu-${{ matrix.version }}
steps:
- uses: actions/checkout@v2
- name: Configure
run: cmake . -DCMAKE_BUILD_TYPE=Release -DBUILD_FLETCHER_AWS-SIM=${{ matrix.sim }}
- name: Package
run: make -j package
- id: deb
run: echo "##[set-output name=name;]$(ls fletch*.deb)"
- name: Upload deb
uses: actions/upload-release-asset@v1
if: ${{ github.event_name == 'release' && github.event.action == 'created' }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ github.event.release.upload_url }}
asset_path: ${{ steps.deb.outputs.name }}
asset_name: ${{ steps.deb.outputs.name }}
asset_content_type: application/octet-stream
|
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
name: snitch_icache
authors:
- <NAME> <<EMAIL>>
- <NAME> <<EMAIL>>
dependencies:
common_cells: {path: ../../vendor/pulp_platform_common_cells}
tech_cells_generic: {path: ../../vendor/pulp_platform_tech_cells_generic}
snitch: {path: ../../ip/snitch}
sources:
# Level 0:
- src/snitch_icache_pkg.sv
# Level 1:
- src/snitch_icache_l0.sv
- src/snitch_icache_refill.sv
- src/snitch_icache_lfsr.sv
- src/snitch_icache_lookup.sv
# Level 2:
- src/snitch_icache_handler.sv
# Level 3:
- src/snitch_icache.sv
- target: test
files:
- test/snitch_icache_l0_tb.sv
|
<filename>.travis.yml
language: python
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- verilator
- verilog
- g++-4.9
python:
- "3.6"
install:
# Convenience script for installing coreir on travis
- wget https://raw.githubusercontent.com/phanrahan/magma/master/.travis/install_coreir.sh
- source install_coreir.sh
- pip install -r requirements.txt
- pip install python-coveralls
- pip install pytest-cov pytest-codestyle
- pip install -e .
# Begin setup CoSA dependencies
- pysmt-install --z3 --confirm-agreement
- export PYTHONPATH="/home/travis/.smt_solvers/python-bindings-3.6:${PYTHONPATH}"
- export LD_LIBRARY_PATH="/home/travis/.smt_solvers/python-bindings-3.6:${LD_LIBRARY_PATH}"
- pysmt-install --check
# End setup CoSA dependencies
- git clone https://github.com/StanfordVLSI/Genesis2.git
- rm -rf Genesis2/Genesis2Tools/PerlLibs/ExtrasForOldPerlDistributions/Compress
# Set env variables for genesis (installed earlier)
- export GENESIS_HOME=`pwd`/Genesis2/Genesis2Tools
- export PATH=$GENESIS_HOME/bin:$GENESIS_HOME/gui/bin:$PATH
- export PERL5LIB=$GENESIS_HOME/PerlLibs/ExtrasForOldPerlDistributions:$PERL5LIB
# For some reason, we get
# > import filecmp
# E ModuleNotFoundError: No module named 'filecmp'
# see https://travis-ci.com/rsetaluri/magma_connect_box/builds/78883000
- wget https://raw.githubusercontent.com/python/cpython/master/Lib/filecmp.py
script:
- pytest --codestyle
--cov cb
--cov common
--cov global_controller
--cov memory_core
--cov pe_core
--cov sb
--cov simple_cb
--cov interconnect
--ignore=filecmp.py
--ignore=Genesis2/
-v --cov-report term-missing .
after_success:
- coveralls
|
<filename>vlsi/example-design.yml
# General Hammer Inputs Related to the Design and Build System
# Generate Make include to aid in flow
vlsi.core.build_system: make
vlsi.core.max_threads: 12
# Hammer will auto-generate a CPF for simple power designs; see hammer/src/hammer-vlsi/defaults.yml for more info
vlsi.inputs.power_spec_mode: "auto"
vlsi.inputs.power_spec_type: "cpf"
# Specify clock signals
vlsi.inputs.clocks: [
{name: "clock", period: "2ns", uncertainty: "0.1ns"}
]
# Specify pin properties
# Default pin placement can be set by the tool
# Default pin layer assignments can be found in some tech plug-ins
vlsi.inputs.pin_mode: generated
vlsi.inputs.pin.generate_mode: semi_auto
# Specify the floorplan
# Default floor plan can be set by the tool
# The path name should match the VLSI_TOP makefile parameter if it is set
par.innovus.floorplan_mode: "auto"
vlsi.inputs.placement_constraints:
- path: "ChipTop"
type: toplevel
x: 0
y: 0
width: 300
height: 300
margins:
left: 0
right: 0
top: 0
bottom: 0
|
language: cpp
# run on new infrastructure
dist: xenial
sudo: false
cache:
apt: true
directories:
$RISCV
$VERILATOR_ROOT
timeout: 1000
# required packages to install
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- gcc-7
- g++-7
- gperf
- autoconf
- automake
- autotools-dev
- libmpc-dev
- libmpfr-dev
- libgmp-dev
- gawk
- build-essential
- bison
- flex
- texinfo
- python-pexpect
- libusb-1.0-0-dev
- default-jdk
- zlib1g-dev
- valgrind
env:
global:
- RISCV="/home/travis/riscv_install"
- VERILATOR_ROOT="/home/travis/verilator-4.018"
before_install:
- export CXX=g++-7 CC=gcc-7
# setup dependent paths
- export PATH=$RISCV/bin:$VERILATOR_ROOT/bin:$PATH
- export LIBRARY_PATH=$RISCV/lib
- export LD_LIBRARY_PATH=$RISCV/lib
- export C_INCLUDE_PATH=$RISCV/include:$VERILATOR_ROOT/share/verilator/include
- export CPLUS_INCLUDE_PATH=$RISCV/include:$VERILATOR_ROOT/share/verilator/include
- export PKG_CONFIG_PATH=$VERILATOR_ROOT/share/pkgconfig
# number of parallel jobs to use for make commands and simulation
- export NUM_JOBS=4
- ci/make-tmp.sh
- git submodule update --init --recursive
stages:
- download
- compile1
- compile2
- test
jobs:
include:
- stage: download
name: download pulp gcc
script:
- ci/download-pulp-gcc.sh
- stage: compile2
name: build verilator
script:
- ci/install-verilator.sh
- stage: compile2
name: build openocd
script:
- ci/get-openocd.sh
- stage: test
name: run openocd debug module tests
script:
- ci/veri-run-openocd-compliance.sh
# extra time during long builds
install: travis_wait
|
<gh_stars>1-10
---
mapper:
class: MyMapper
initialize: file.abnf
selector:
class: MySelector
require: test/data/external_class.rb
attribute1: 3
attr2: toto
|
<reponame>DanielTRYTRYLOOK/RDF-2020
name: pci_bridge32
clock_port: wb_clk_i
verilog:
- bus_commands.v
- pci_async_reset_flop.v
- pci_bridge32.v
- pci_cbe_en_crit.v
- pci_conf_cyc_addr_dec.v
- pci_conf_space.v
- pci_constants.v
- pci_cur_out_reg.v
- pci_delayed_sync.v
- pci_delayed_write_reg.v
- pci_frame_crit.v
- pci_frame_en_crit.v
- pci_frame_load_crit.v
- pci_in_reg.v
- pci_io_mux.v
- pci_io_mux_ad_en_crit.v
- pci_io_mux_ad_load_crit.v
- pci_irdy_out_crit.v
- pci_mas_ad_en_crit.v
- pci_mas_ad_load_crit.v
- pci_mas_ch_state_crit.v
- pci_master32_sm.v
- pci_master32_sm_if.v
- pci_out_reg.v
- pci_par_crit.v
- pci_parity_check.v
- pci_pci_decoder.v
- pci_pci_tpram.v
- pci_perr_crit.v
- pci_pcir_fifo_control.v
- pci_pciw_fifo_control.v
- pci_pciw_pcir_fifos.v
- pci_perr_en_crit.v
- pci_ram_16x40d.v
- pci_rst_int.v
- pci_serr_crit.v
- pci_serr_en_crit.v
- pci_spoci_ctrl.v
- pci_sync_module.v
- pci_synchronizer_flop.v
- pci_target32_clk_en.v
- pci_target32_devs_crit.v
- pci_target32_interface.v
- pci_target32_sm.v
- pci_target32_stop_crit.v
- pci_target32_trdy_crit.v
- pci_target_unit.v
- pci_user_constants.v
- pci_wb_addr_mux.v
- pci_wb_decoder.v
- pci_wb_master.v
- pci_wb_slave.v
- pci_wb_slave_unit.v
- pci_wb_tpram.v
- pci_wbr_fifo_control.v
- pci_wbs_wbb3_2_wbb2.v
- pci_wbw_fifo_control.v
- pci_wbw_wbr_fifos.v
- timescale.v
|
description: Featherweight IP programmable interrupt controller
compatible: "fw_pic"
include: [interrupt-controller.yaml, base.yaml]
properties:
reg:
required: true
"#interrupt-cells":
const: 1
num-irqs:
type: int
required: true
description: Number of IRQs the intc supports
irq:
type: int
required: true
description: Interrupt index for PIC
interrupt-cells:
- irq
|
name: C/C++ CI Build and Test (dummy)
on:
workflow_dispatch:
push:
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
build-type: [Debug, Release, RelWithDebInfo]
steps:
- run: exit 0
test:
runs-on: ubuntu-latest
steps:
- run: exit 0
|
<filename>models/AI-Model-Zoo/model-list/cf_inceptionv3_imagenet_299_299_11.4G_2.0/model.yaml
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: inception-v3 classifier on ImageNet.
input size: 299*299
float ops: 11.4G
task: classification
framework: caffe
prune: 'no'
version: 2.0
files:
- name: cf_inceptionv3_imagenet_299_299_11.4G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=cf_inceptionv3_imagenet_299_299_11.4G_2.0.zip
checksum: 925973b94a7b304669a417def6f402d6
- name: inception_v3
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_v3-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: d7b3e82b227184ada86251739cc233d0
- name: inception_v3
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_v3-vck190-r2.0.0.tar.gz
checksum: d0407c86279d1e69a021e1cf6f15c3fc
- name: inception_v3
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_v3-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
  checksum: 46af841e4c2909262435a9cf84e4333fe  # NOTE(review): 33 hex digits — not a valid 32-char MD5; verify this checksum
- name: inception_v3
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_v3-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: 328c818d8fd1202cd33459ed05d5684e
- name: inception_v3
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_v3-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: 97ec05163681e8b34cac193188480a2a
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
---
algorithm:
class: SteadyState
population_size: 800
probabilities:
crossover: 0.9
mutation: 0.1
injection: 0.1
termination:
max_steps: 2000
on_individual: :stopping_condition
init:
method: ramped # grow or full or random or ramped
random_length: 8
sensible_depth: 5
inject: # alternative to a crossover or copy
method: grow # grow or full or random
random_length: 8
sensible_depth: 7
codon:
class: CodonGray
bit_size: 5
grammar:
class: Abnf::File
filename: sample/ant_trail_tcc/grammar.abnf
mapper:
class: BreadthFirst
track_support_on: true
selection:
class: Tournament
# proportional_by: fitness
tournament_size: 2
selection_rank: # optional
class: Ranking
replacement:
class: Tournament # Truncation
tournament_size: 10
replacement_rank:
class: Ranking
order_by: :fitness
direction: :minimize
crossover:
class: CrossoverLHS
mutation:
class: MutationBitStructural
store:
class: Store
filename: ./ant_steady_state_tcc.store
report:
class: PopulationReport
individual:
class: PipedIndividual
shorten_chromozome: true
_pareto:
:fitness: maximize
_pipe_output:
- :fitness: to_i
_thresholds:
:fitness: 89
_mark_phenotype: "\nMARKER\n"
evaluator:
class: WorkPipes
commands:
- 'ruby sample/ant_trail_tcc/ant_pipe.rb SINGLE sample/ant_trail_tcc/ant_evaluate.c'
# no need for establishing more than one pipe
# for the remote pipe configuration, use:
#
# - 'ssh user@host "ruby /full/path/to/geret/sample/ant_trail_tcc/ant_pipe.rb ID sample/ant_trail_tcc/ant_evaluate.c"'
#
# note the ssh connection must use public/private key pair (no password) for authentication.
# (see eg. http://www.petefreitag.com/item/532.cfm
# or http://www.debian-administration.org/article/SSH_with_authentication_key_instead_of_password )
|
<gh_stars>1-10
# RUN: not lld -flavor darwin -arch x86_64 -macosx_version_min 10.9 -flat_namespace -undefined error %s -o %t %p/Inputs/libSystem.yaml 2>&1 | FileCheck %s
--- !native
defined-atoms:
- name: _main
scope: global
content: [ E9, 00, 00, 00, 00 ]
alignment: 16
references:
- kind: branch32
offset: 1
target: _bar
undefined-atoms:
- name: _bar
# Make sure we error out for -flat_namespace -undefined error.
# CHECK: Undefined symbol: : _bar
|
name: 'Documentation'
on:
push:
pull_request:
workflow_dispatch:
jobs:
doxygen:
runs-on: ubuntu-latest
name: 'SW Framework'
steps:
- name: '🧰 Repository Checkout'
uses: actions/checkout@v2
- name: '🛠️ Modifying Doxyfile'
run: |
ls -al ./docs
sed -i 's/$(PWD)\/../$(GITHUB_WORKSPACE)/g' ./docs/Doxyfile
- name: '📚 Generate Doxygen Documentation'
uses: mattnotmitt/[email protected]
with:
working-directory: '.'
doxyfile-path: 'docs/Doxyfile'
- name: '📤 Upload Artifact'
uses: actions/upload-artifact@v2
with:
name: NEORV32-Doxygen
path: doxygen_build/html
asciidoctor:
runs-on: ubuntu-latest
name: 'Datasheet'
steps:
- name: '🧰 Repository Checkout'
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: '🐍 Install doit'
run: pip install doit
- name: '📚 Build Datasheet and User Guide (PDF and HTML)'
run: ./do.py Documentation container
- name: '📤 Upload Artifact: HTML'
uses: actions/upload-artifact@v2
with:
name: NEORV32
path: docs/public
deploy:
if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/master' || contains(github.ref, 'refs/tags/'))
needs:
- doxygen
- asciidoctor
runs-on: ubuntu-latest
name: 'Deploy to Releases and Pages'
steps:
- name: '🧰 Repository Checkout'
uses: actions/checkout@v2
- name: '📥 Download Artifacts'
uses: actions/download-artifact@v2
- name: '🛠️ Organise public subdir and create a tarball'
run: |
mv NEORV32 public
mv public/pdf ./
mv NEORV32-Doxygen public/sw
tar zvcf NEORV32-SITE-nightly.tar.gz -C public .
cd pdf
mv NEORV32.pdf NEORV32-nightly.pdf
mv NEORV32_UserGuide.pdf NEORV32_UserGuide-nightly.pdf
# Tagged: create a pre-release or a release (semver)
# Untagged: update the assets of pre-release 'nightly'
- name: '📦 Deploy to GitHub-Releases'
env:
GITHUB_TOKEN: ${{ github.token }}
run: |
gh release upload nightly NEORV32-SITE-nightly.tar.gz pdf/NEORV32*nightly.pdf --clobber
- name: '🐍 Install doit'
run: pip install doit
- name: '🚀 Deploy to GitHub-Pages'
run: ./do.py DeployToGitHubPages "update ${{ github.sha }}"
|
<filename>integration_files/SweRV_EH1/riscv_dv_extension/testlist.yaml<gh_stars>0
- test: cmark
c_tests: directed_tests/c/cmark.c
gcc_opts: -g -O3 -funroll-all-loops
iterations: 1
rtl_test: core_base_test_benchmark
- test: cmark_iccm
c_tests: directed_tests/c/cmark_iccm.c
gcc_opts: -g -O3 -funroll-all-loops
iterations: 1
rtl_test: core_base_test_benchmark
- test: c_sample
c_tests: directed_tests/c/c_sample.c
gcc_opts: -g -O3 -funroll-all-loops
iterations: 1
rtl_test: core_base_test_benchmark
- test: hello_world
asm_tests: directed_tests/asm/hello_world.s
iterations: 1
rtl_test: core_base_test_benchmark
- test: hello_world_dccm
asm_tests: directed_tests/asm/hello_world_dccm.s
iterations: 1
rtl_test: core_base_test_benchmark
- test: riscv_arithmetic_basic_test
description: >
Arithmetic instruction test, no load/store/branch instructions
gen_opts: >
+instr_cnt=5000
+num_of_sub_program=0
+directed_instr_0=riscv_int_numeric_corner_stream,4
+no_fence=1
+no_data_page=1
+no_branch_jump=1
+boot_mode=m
+no_csr_instr=1
iterations: 2
gen_test: riscv_instr_base_test
rtl_test: core_base_test
- test: riscv_load_store_instr_dccm_test
description: >
Random instruction stress test
iterations: 2
gen_test: riscv_instr_base_test
gen_opts: >
+instr_cnt=1000
+num_of_sub_program=0
+directed_instr_0=riscv_load_store_rand_instr_stream,4
rtl_test: core_base_test
- test: riscv_rand_instr_test
description: >
Random instruction stress test
iterations: 2
gen_test: riscv_instr_base_test
gen_opts: >
+instr_cnt=5000
+num_of_sub_program=5
+directed_instr_0=riscv_load_store_rand_instr_stream,4
+directed_instr_1=riscv_loop_instr,4
+directed_instr_2=riscv_hazard_instr_stream,4
+directed_instr_3=riscv_load_store_hazard_instr_stream,4
+directed_instr_4=riscv_multi_page_load_store_instr_stream,4
+directed_instr_5=riscv_mem_region_stress_test,4
+directed_instr_6=riscv_jal_instr,4
rtl_test: core_base_test
- test: riscv_jump_stress_test
description: >
Stress back-to-back jump instruction test
iterations: 2
gen_test: riscv_instr_base_test
gen_opts: >
+instr_cnt=5000
+num_of_sub_program=5
+directed_instr_1=riscv_jal_instr,20
rtl_test: core_base_test
- test: riscv_loop_test
description: >
Random instruction stress test
iterations: 2
gen_test: riscv_instr_base_test
gen_opts: >
+instr_cnt=10000
+num_of_sub_program=5
+directed_instr_1=riscv_loop_instr,20
rtl_test: core_base_test
- test: riscv_rand_jump_test
description: >
Jump among large number of sub-programs, stress testing iTLB operations.
iterations: 2
gen_test: riscv_instr_base_test
gen_opts: >
+instr_cnt=10000
+num_of_sub_program=10
+directed_instr_0=riscv_load_store_rand_instr_stream,8
rtl_test: core_base_test
# Please enable this test for your RTL simulation
- test: riscv_csr_test
description: >
Test all CSR instructions on all implemented CSR registers
iterations: 1
no_iss: 1
rtl_test: core_csr_test
no_post_compare: 1
- test: riscv_exceptions_test
description: >
Random instruction test with complete interrupt handling
iterations: 1
gen_test: riscv_rand_instr_test
gen_opts: >
+gen_exceptions=1
rtl_test: core_base_test
sim_opts: >
+enable_irq_seq=1
no_post_compare: 1
|
<reponame>codepainters/edalize<filename>.github/workflows/ci.yml
name: CI
on:
push:
pull_request:
# Run CI once a week even without code changes to ensure tests pass with
# updated dependencies.
schedule:
- cron: '0 0 * * 5'
# Allow triggering a CI run from the web UI.
workflow_dispatch:
jobs:
Format:
name: 🐍 Format
runs-on: ubuntu-latest
steps:
- name: 🧰 Checkout
uses: actions/checkout@v2
- name: 🐍 Setup Python ${{ matrix.pyver }}
uses: actions/setup-python@v2
with:
python-version: '3.10'
- name: 🚦 Run pre-commit Action
uses: pre-commit/[email protected]
Build:
strategy:
fail-fast: false
matrix:
os:
- { icon: 🐧, name: ubuntu }
#- { icon: 🍎, name: macos }
#- { icon: 🧊, name: windows }
pyver:
- '3.6'
- '3.7'
- '3.8'
- '3.9'
- '3.10'
runs-on: ${{ matrix.os.name }}-latest
name: ${{ matrix.os.icon }} ${{ matrix.os.name }} | ${{ matrix.pyver }}
steps:
- name: 🧰 Repository Checkout
uses: actions/checkout@v2
- name: 🐍 Set up Python ${{ matrix.pyver }}
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.pyver }}
- name: 🛠️ Install dependencies
run: |
python -c "import sys; print(sys.version)"
pip install tox
- name: 🚧 Build package and run tests with tox
run: tox -e py
|
<filename>app/Tlut/configs/arch/arch_tlut_systolic_projection_bank16_block32.yml
# This file defines single architecture set for tlut systolic array performance projection
- proj_16_16_bank16_block32
- proj_32_32_bank16_block32
- proj_64_64_bank16_block32
- proj_128_128_bank16_block32
|
<reponame>SamuelRiedel/snitch
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
name: system-snitch-cluster
authors:
- <NAME> <<EMAIL>>
- <NAME> <<EMAIL>>
- <NAME> <<EMAIL>>
dependencies:
axi_riscv_atomics: {path: ../../vendor/pulp_platform_axi_riscv_atomics}
snitch-cluster: {path: ../../ip/snitch_cluster}
sources:
# Level 0:
- generated/snitch_cluster_wrapper.sv
# Level 1:
- target: any(simulation, verilator)
files:
- test/tb_memory.sv
- test/testharness.sv
# Level 2:
- target: test
files:
- test/tb_bin.sv
|
%YAML 1.2
---
Basis:
feature_pt_min_num_feat: 2000 # cv::calcOpticalFlowPyrLKで追跡された特徴点の数が
# この値より少なければFAST特徴点抽出による再計算を行う
vo_queue_depth: 5
vo_dtheta_valid_norm_threshold: 0.001 # 移動量がこの値以下であれば⊿θ = 0とする
Fast:
use_nms: 1 # 1: use, 0: not use
threshold: 20
OpticalFlow:
win_size: 21
tarm_criteria_max_count: 30
tarm_criteria_epsilon: 0.01
max_level: 3
min_eig_threshold: 0.001
# TODO: Calibrationファイル(.xml)を参照するように変更する
FivePt:
focal: 306.85128308786631 # Calibrationパラメータ fx
pp_x: 316.65578487129721 # Calibrationパラメータ cx
pp_y: 235.22872797028347 # Calibrationパラメータ cy
prob: 0.999
threshold: 1.0
|
<reponame>BearerPipelineTest/google-ctf<filename>2021/quals/rev-adspam/metadata.yaml
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Human readable task name
name: AdSpam
# Long form description.
description: |+
We've intercepted this demo build of a new ad spam bot, see if you can find anything interesting.
# The flag
flag: CTF{n0w_u_kn0w_h0w_n0t_t0_l1c3n53_ur_b0t}
# Task category. (one of hw, crypto, pwn, rev, web, net, misc)
category: reversing
# === the fields below will be filled by SRE or automation ===
# Task label
label: ''
# URL for web challenges
link: ''
# host/port for non-web challenges
host: 'adspam.2021.ctfcompetition.com 1337'
# the URL for attachments, to be filled in by automation
attachment: ''
# is this challenge released? Will be set by SREs
visible: false
|
<reponame>drhodes/jade2hdl
flags: {}
extra-package-dbs: []
packages:
- .
- ../rawr
extra-deps:
- hashids-1.0.2.3
- parsec3-numbers-0.1.0
- rawr-0.1.0
resolver: lts-9.4
|
<gh_stars>1-10
apb_i2c:
incdirs: [
.,
]
files: [
apb_i2c.sv,
i2c_master_bit_ctrl.sv,
i2c_master_byte_ctrl.sv,
i2c_master_defines.sv,
]
|
marble_sim:
stage: test
script:
- cd projects/test_marble_family && make all net_slave_check
.bitgen:
stage: synthesis
before_script:
- cd projects/test_marble_family && ls /non-free
artifacts:
name: "$CI_JOB_NAME-$CI_COMMIT_REF_NAME"
expire_in: 2 days
marble_mini_synth:
extends: .bitgen
script:
- PATH=$XILINX_VIVADO/bin:$PATH make marble1.bit
artifacts:
paths:
- projects/test_marble_family/marble*.bit
- projects/test_marble_family/vivado.log
- projects/test_marble_family/_xilinx/marble1.runs/impl_1/marble_top_utilization_placed.rpt
marble_mini_run:
stage: program
tags:
- deploy
dependencies:
- marble_mini_synth
before_script:
- cd projects/test_marble_family && ln -s marble1.*.bit marble1.bit
script:
- make marble1_hwload && sleep 8 && make marble1_hwtest
marble_v2_synth:
extends: .bitgen
script:
- PATH=$XILINX_VIVADO/bin:$PATH make marble2.bit
artifacts:
paths:
- projects/test_marble_family/marble*.bit
- projects/test_marble_family/vivado.log
- projects/test_marble_family/_xilinx/marble2.runs/impl_1/marble_top_utilization_placed.rpt
|
<filename>third_party/tests/ariane/tb/common_verification/Bender.yml
package:
name: common_verification
authors:
- "<NAME> <<EMAIL>>"
sources:
# Files in this package are meant for simulation only.
- target: simulation
files:
# Source files grouped in levels. Files in level 0 have no dependencies on files in this
# package. Files in level 1 only depend on files in level 0, files in level 2 on files in
# levels 1 and 0, etc. Files within a level are ordered alphabetically.
# Level 0
- src/clk_rst_gen.sv
- src/rand_id_queue.sv
- src/rand_stream_mst.sv
- src/rand_synch_holdable_driver.sv
- src/rand_verif_pkg.sv
- src/sim_timeout.sv
# Level 1
- src/rand_synch_driver.sv
# Level 2
- src/rand_stream_slv.sv
|
dist: bionic
language: cpp
compiler:
- gcc
addons:
apt:
packages:
- g++-8
before_install:
- wget https://github.com/bazelbuild/bazel/releases/download/1.1.0/bazel_1.1.0-linux-x86_64.deb
- sudo dpkg -i bazel_1.1.0-linux-x86_64.deb
- git clone https://github.com/google/verible.git verible
- cd verible && bazel build --noshow_progress --cxxopt='-std=c++17' //...
- cd -
script:
- printf "%s\n%s\n\n" "Verible commit:" "$(git --git-dir=verible/.git log -1 --pretty=oneline)"
- ./test_loop.sh
|
package:
name: cluster_peripherals
dependencies:
hci: { git: "https://github.com/pulp-platform/hci.git", version: "1.0" }
sources:
- cluster_control_unit/cluster_control_unit.sv
- include_dirs:
- event_unit/include
files:
# Level 0
- event_unit/HW_barrier_logic.sv
- event_unit/event_unit_arbiter.sv
- event_unit/event_unit_mux.sv
- event_unit/event_unit_sm.sv
- event_unit/interrupt_mask.sv
# Level 1
- event_unit/HW_barrier.sv
- event_unit/event_unit_input.sv
# Level 2
- event_unit/event_unit.sv
- icache_ctrl_unit/icache_ctrl_unit.sv
- icache_ctrl_unit/mp_icache_ctrl_unit.sv
- icache_ctrl_unit/mp_pf_icache_ctrl_unit.sv
- icache_ctrl_unit/new_icache_ctrl_unit.sv
- icache_ctrl_unit/pri_icache_ctrl_unit.sv
- icache_ctrl_unit/sp_icache_ctrl_unit.sv
- mmu_config_unit/mmu_config_unit.sv
- perf_counters_unit/perf_counters_unit.sv
- tcdm_pipe_unit/tcdm_pipe_unit.sv
|
<filename>.github/workflows/android.yml
name: Android CI
on:
push:
branches:
- master
- tree/*MAINT
pull_request:
branches:
- master
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: set up JDK 11
uses: actions/setup-java@v1
with:
java-version: 11
- name: Copy dummy keys.txt
shell: bash
run: |
mkdir -p $GITHUB_WORKSPACE/../private_assets
cp $GITHUB_WORKSPACE/src/test/assets/keys.txt $GITHUB_WORKSPACE/../private_assets
- name: Build with Gradle
run: ./gradlew assembleCurrentDebug
- name: Run unit tests
run: ./gradlew testCurrentDebugUnitTest
- name: Upload Test Results
if: ${{ always() }}
uses: actions/upload-artifact@v2
with:
name: Test output
        path: ${{ github.workspace }}/build/reports/tests/testCurrentDebugUnitTest/
- name: Run lint
run: ./gradlew lint
|
<reponame>hzeller/Surelog
name: 'regression_on_demand'
on:
workflow_dispatch:
jobs:
linux-gcc:
runs-on: ubuntu-latest
defaults:
run:
shell: bash
steps:
- name: Install dependencies
run: |
sudo apt-get update -qq
sudo apt install -y \
g++-7 \
tclsh \
default-jre \
cmake \
build-essential \
swig \
google-perftools \
libgoogle-perftools-dev \
uuid-dev \
lcov
sudo ln -sf /usr/bin/g++-7 /usr/bin/g++
sudo ln -sf /usr/bin/gcc-7 /usr/bin/gcc
sudo ln -sf /usr/bin/gcov-7 /usr/bin/gcov
- uses: actions/setup-python@v2
with:
        python-version: '3.8'
- name: Checkout code
uses: actions/checkout@v2
with:
submodules: recursive
fetch-depth: 0
- name: Configure shell
run: |
echo 'CC=gcc-7' >> $GITHUB_ENV
echo 'CXX=g++-7' >> $GITHUB_ENV
echo 'PATH=/usr/lib/ccache:'"$PATH" >> $GITHUB_ENV
echo 'PREFIX=/tmp/surelog-install' >> $GITHUB_ENV
- name: Run Regression
run: |
env
which cmake && cmake --version
which make && make --version
which swig && swig -version
which java && java -version
which python && python --version
which ninja && ninja --version
which tclsh && echo 'puts [info patchlevel];exit 0' | tclsh
which $CC && $CC --version
which $CXX && $CXX --version
make regression
- name: Archive regression artifacts
if: always()
uses: actions/upload-artifact@v2
with:
name: surelog-linux-gcc-regression
path: |
${{ github.workspace }}/build/test/
${{ github.workspace }}/build/tests/
windows-msvc:
runs-on: windows-latest
defaults:
run:
shell: cmd
steps:
- name: Install Core Dependencies
run: |
choco install -y make
choco install -y swig --side-by-side --version=3.0.12
- name: Setup Python
uses: actions/setup-python@v2
with:
        python-version: '3.8'
architecture: x64
- name: Setup Java
uses: actions/setup-java@v1
with:
java-version: 1.8
java-package: jre
architecture: x64
- run: git config --global core.autocrlf input
shell: bash
- uses: actions/checkout@v2
with:
submodules: recursive
fetch-depth: 0
- name: Run Regression
run: |
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat"
set CMAKE_GENERATOR=Ninja
set CC=cl
set CXX=cl
set NO_TCMALLOC=On
set PREFIX=%GITHUB_WORKSPACE%\install
set CPU_CORES=%NUMBER_OF_PROCESSORS%
set MAKE_DIR=C:\make\bin
set TCL_DIR=%PROGRAMFILES%\Git\mingw64\bin
        set SWIG_DIR=%PROGRAMDATA%\chocolatey\lib\swig\tools\install\swigwin-3.0.12
set PATH=%pythonLocation%;%SWIG_DIR%;%JAVA_HOME%\bin;%MAKE_DIR%;%TCL_DIR%;%PATH%
set
where cmake && cmake --version
where make && make --version
where swig && swig -version
where java && java -version
where python && python --version
where ninja && ninja --version
make regression
- name: Archive regression artifacts
if: always()
uses: actions/upload-artifact@v2
with:
name: surelog-windows-msvc-regression
path: |
${{ github.workspace }}/build/test/
${{ github.workspace }}/build/tests/
|
<filename>.travis.yml
sudo: enabled
language: cpp
env:
global:
- secure: "<KEY>
before_script:
  - echo -n | openssl s_client -connect scan.coverity.com:443 | sed -ne '/-BEGIN CERTIFICATE-/,/-END CERTIFICATE-/p' | sudo tee -a /etc/ssl/certs/ca-certificates.crt
- ./src/runtime_src/tools/scripts/xrtdeps.sh
- cd build
before_install:
- sudo apt-get update
addons:
coverity_scan:
project:
name: "Xilinx/XRT"
description: "Xilinx Runtime"
notification_email: <EMAIL>
build_command: "./build.sh"
branch_pattern: master
script: if [ "$COVERITY_SCAN_BRANCH" != 1 ]; then ./build.sh ; fi
|
register_maps:
- ./test_map_1.yaml
- ./test_map_2.yaml
output_generators:
- vhdl_package
- c_header
output_paths:
default: ./default_outputs
c_header: ./c_outputs/
|
<filename>examples/heart/main.yaml
name: heart
system:
outputs:
SA_v: REAL
RA_v: REAL
OS_v: REAL
Fast_v: REAL
AV_v: REAL
His_v: REAL
RBB_v: REAL
RVA_v: REAL
RV_v: REAL
definitions:
Cell: !include cell.yaml
Path: !include path.yaml
instances:
!include cells.yaml
!include paths.yaml
mappings:
!include mappings.yaml
codegenConfig:
execution:
stepSize: 0.00001
simulationTime: 10
logging:
enable: true
file: out.csv
fields:
- SA_v
- RA_v
- OS_v
- Fast_v
- AV_v
- His_v
- RBB_v
- RVA_v
- RV_v
parametrisationMethod: COMPILE_TIME
maximumInterTransitions: 1
requireOneIntraTransitionPerTick: false
|
variables:
build_num: $(Build.BuildNumber)
jobs:
- job: PyPi_Linux
pool:
vmImage: 'ubuntu-18.04'
steps:
- script: |
python -m pip install wheel twine
displayName: 'Install wheel+twine'
- script: |
docker pull quay.io/pypa/manylinux2010_x86_64
docker build -t pybfms .
displayName: 'Setup Docker Image'
- script: |
docker run -e BUILD_NUM=$(Build.BuildNumber) -v $(Build.Repository.LocalPath):/pybfms pybfms
displayName: 'Build Wheel'
- task: TwineAuthenticate@1
condition: eq(variables['Build.SourceBranchName'], 'master')
inputs:
pythonUploadServiceConnection: pypi-pybfms
- script: |
# Only deploy from master
if test "$(Build.SourceBranchName)" = "master"; then
python -m twine --version
echo "Calling twine"
python -m twine upload -r pybfms --config-file $(PYPIRC_PATH) result/wheelhouse/* result/dist/*.tar.gz
echo "Calling twine complete"
fi
displayName: 'Upload to PyPi'
|
# Copyright 2021 ETH Zurich and University of Bologna.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
---
address:
# Snitch: cl_peripherals + 0x40
barrier_reg: 0x120040
cluster_base_hartid: 0x40000040
cluster_id: 0x40000050
cluster_num: 0x40000048
nr_cores: 0x40000010
# Snitch/Occamy: SOC_CTRL + 0x14
# in Banshee also used as exit code location
scratch_reg: 0x02000014
tcdm_end: 0x40000008
tcdm_start: 0x40000000
uart: 0xF00B8000
wakeup_reg: 0x40000028
# Snitch: Bootdata.clint_base
clint: 0xFFFF0000
# Snitch: cl_peripherals + 0x30
cl_clint: 0x120030
architecture:
num_cores: 9
num_clusters: 1
base_hartid: 0
bootrom:
callbacks:
- name: bootrom-cluster
size: 0x34
end: 0x01020000
latency: 5
start: 0x01000000
memory:
- dram:
end: 0xF0000000
latency: 4
start: 0x80000000
ext_tcdm: []
periphs:
callbacks:
- name: zero-memory
size: 64000
end: 0x140000
latency: 2
start: 0x130000
tcdm:
end: 0x120000
latency: 1
start: 0x100000
|
# .goreleaser.yml
# Build customization
builds:
- main: ./cmd/riscv-as/main.go
binary: riscv-as
goos:
- windows
- darwin
- linux
- main: ./cmd/riscv-tb/main.go
binary: riscv-tb
goos:
- windows
- darwin
- linux
archive:
files:
- examples/*
|
---
name: template
board: boards/cora07s
version: 0.1.1
cores:
- fpga/cores/axi_ctl_register_v1_0
- fpga/cores/dna_reader_v1_0
- fpga/cores/axi_sts_register_v1_0
memory:
- name: control
offset: '0x60000000'
range: 4K
- name: status
offset: '0x50000000'
range: 4K
- name: led0
offset: '0x40001000'
range: 4K
- name: led1
offset: '0x40002000'
range: 4K
- name: pmod0
offset: '0x40003000'
range: 4K
- name: pmod1
offset: '0x40004000'
range: 4K
- name: ck_iic
offset: '0x40005000'
range: 4K
- name: ck_spi
offset: '0x40006000'
range: 4K
- name: xadc
offset: '0x40007000'
range: 4K
control_registers:
- led
status_registers:
- forty_two
parameters:
fclk0: 100000000 # FPGA clock speed in Hz
xdc:
- boards/cora07s/config/ports.xdc
- boards/cora07s/config/ck-analogue.xdc
- ./constraints.xdc
drivers:
- server/drivers/common.hpp
#- boards/cora07s/drivers/xadc.hpp
#- boards/cora07s/drivers/pmod.hpp
- ./template.hpp
web:
- web/koheron.ts
- web/main.css
|
hwpe-ctrl:
commit: ec068eb
hwpe-stream:
group: bigcola317
commit: mac-single-tcdm
tech_cells_generic:
commit: b35652608124b7ea813818b14a00ca76edd7599d
|
<filename>Gathered CTF writeups/2017-06-17-googlectf/a7_gee_cue_elle/app.yaml
service: anon2-lmuwucba5we9gi5a
runtime: python27
api_version: 1
threadsafe: true
handlers:
- url: /login
script: main.app
- url: /
static_files: index.html
upload: index.html
- url: /(.+)
static_files: \1
upload: .*[.](?!py$).*
libraries:
- name: webapp2
version: latest
- name: jinja2
version: latest
skip_files:
- ^(.*/)?\.bak$
|
theme: jekyll-theme-dinky
title: RV12 RISC-V CPU Core
description: Single-issue, single-core RV32I, RV64I compliant RISC-V CPU
show_downloads: true
show_license: true
license: Non-Commercial License
|
%YAML 1.2
---
Motor:
one_rotation_pulse: 630 # 1回転あたりのロータリーエンコーダのパルス数
one_rotation_dist: 0.233 # 1回転で進む距離(m)
tire_tread: 0.17 # タイヤ間の距離(m)
|
<reponame>mfkiwl/snitch
package:
name: lowrisc_prim
description: "lowRISC RTL primitives"
authors: ["lowRISC Contributors"]
# Used for implementation of primitive cells
dependencies:
common_cells: {path: ../../pulp_platform_common_cells}
sources:
# Patched in
- prim_pulp_platform/prim_flop_2sync.sv
# Level 0
- rtl/prim_util_pkg.sv
# Level 1
- rtl/prim_sync_reqack.sv
- rtl/prim_sync_reqack_data.sv
- rtl/prim_pulse_sync.sv
- rtl/prim_packer_fifo.sv
- rtl/prim_fifo_sync.sv
- rtl/prim_filter_ctr.sv
- rtl/prim_intr_hw.sv
# Level 2
- rtl/prim_fifo_async.sv
|
# Run in container-based environment
sudo: required
dist: trusty
group: edge # Add this
git:
submodules: false
# Using the precompiled toolchain from docker
services:
- docker
# define test
env:
global:
- DOCKER_TOP=/opt/lowrisc
- DOCKER_TAG=rocket-v1.0
matrix:
- CONFIG=LoRCDefaultConfig TEST_CASE=run-asm-tests
# tag is not enabled for now
# - CONFIG=TagConfig TEST_CASE=run-asm-tests
# - CONFIG=TagConfig TEST_CASE=run-tag-tests
# actual test build
before_install:
- git submodule update --init --recursive rocket-chip
- docker pull lowrisc/lowrisc-docker:$DOCKER_TAG
before_script:
- docker create -v $PWD:/opt/lowrisc -e TOP=/opt/lowrisc -t --name=test lowrisc/lowrisc-docker:$DOCKER_TAG
- docker start test
script:
- docker exec test make -C $DOCKER_TOP/rocket-chip/vsim CONFIG=$CONFIG $TEST_CASE
after_script:
- docker stop test
- docker rm test
|
<gh_stars>100-1000
package:
name: axi_slice
dependencies:
common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.7.4 }
sources:
- src/axi_single_slice.sv
- src/axi_ar_buffer.sv
- src/axi_aw_buffer.sv
- src/axi_b_buffer.sv
- src/axi_r_buffer.sv
- src/axi_slice.sv
- src/axi_w_buffer.sv
- src/axi_slice_wrap.sv
|
<filename>src_files.yml<gh_stars>1-10
apb_event_unit:
incdirs: [
./include/,
]
files: [
apb_event_unit.sv,
generic_service_unit.sv,
sleep_unit.sv,
]
|
language: python
install:
- pip install tox
script:
- tox -e $BUILD_NAME
matrix:
include:
- env: BUILD_NAME=py27-unit
python: '2.7'
- env: BUILD_NAME=py33-unit
python: '3.3'
- env: BUILD_NAME=py34-unit
python: '3.4'
- env: BUILD_NAME=py35-unit
python: '3.5'
- env: BUILD_NAME=py36-unit
python: '3.6'
- env: BUILD_NAME=py27-lint
python: '2.7'
- env: BUILD_NAME=py36-lint
python: '3.6'
- env: BUILD_NAME=py27-docs
python: '2.7'
- env: BUILD_NAME=py36-docs
python: '3.6'
# Python 2.7 with ghdl mcode
- env: BUILD_NAME=py27-acceptance-ghdl
python: '2.7'
os: linux
sudo: false
addons:
apt:
packages:
- gnat
before_script:
- git clone --depth 1 https://github.com/tgingold/ghdl.git ghdl
- cd ghdl
- mkdir build-mcode
- cd build-mcode
- ../configure --prefix=../../install-ghdl-mcode/
- make
- make install
- cd ../../
- export PATH=$PATH:install-ghdl-mcode/bin/
# Python 3.6 with ghdl llvm
- env: BUILD_NAME=py36-acceptance-ghdl
python: '3.6'
os: linux
sudo: required
dist: trusty
before_install:
- sudo apt-get update -qq
- sudo apt-get install -y gnat-4.8 zlib1g-dev
- sudo apt-get install -y llvm-3.5-dev llvm-3.5-tools libedit-dev
before_script:
- git clone --depth 1 https://github.com/tgingold/ghdl.git ghdl
- cd ghdl
- mkdir build-llvm
- cd build-llvm
- ../configure --prefix=../../install-ghdl-llvm/ --with-llvm-config=llvm-config-3.5
- make
- make install
- cd ../../
- export PATH=$PATH:install-ghdl-llvm/bin/
# Deploy to GitHub pages
- stage: deploy
python: '3.6'
script:
- tox -e py36-docs
- touch .tox/py36-docs/tmp/docsbuild/.nojekyll
deploy:
provider: pages
repo: VUnit/VUnit.github.io
target_branch: master
local_dir: .tox/py36-docs/tmp/docsbuild/
# This environment variable is set to an OAuth token in travis vunit settings
github_token: $GITHUB_PAGES_TOKEN
skip_cleanup: true
on:
branch: master
# Deploy to PyPI whenever the package version has changed
# When a package version has not changed a new upload will not be triggered
- stage: deploy
python: '3.6'
script:
- sed -i "s/PRE_RELEASE = True/PRE_RELEASE = False/" vunit/about.py
deploy:
provider: pypi
distributions: sdist
skip_cleanup: true
skip_upload_docs: true
user: $PYPI_USER
password: <PASSWORD>
on:
branch: master
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.