# repo: ess-dmsc/dmg-build-scripts (0 stars)
---
- hosts: data-generators
gather_facts: False
tasks:
- name: start loki data
command: "{{daemonize_cmd}} {{script_path}}/datagen_loki.bash"
tags:
- loki
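# Usage sketch (file name and variable values assumed): daemonize_cmd and script_path
# must come from the inventory or extra vars, e.g.
#   ansible-playbook -i inventory datagen_start_loki.yml --tags loki \
#     -e daemonize_cmd=daemonize -e script_path=/opt/dmg/scripts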
|
# repo: ejangelico/cryo-on-epix-hr-dev (1-10 stars)
cryoAsicGen1:
enable: True
ForceWrite: False
InitAfterConfig: False
PollEn: True
dataWriter:
enable: True
DataFile: ''
IsOpen: False
BufferSize: 0
MaxFileSize: 0
EpixHRGen1Cryo:
enable: True
MMCMSerdesRegisters:
enable: True
CLKOUT0PhaseMux: 0
CLKOUT0HighTime: 2
CLKOUT0LowTime: 2
CLKOUT0Frac: 0
CLKOUT0FracEn: 0
CLKOUT0Edge: 0
CLKOUT0NoCount: 0
CLKOUT0DelayTime: 0
CLKOUT1PhaseMux: 0
CLKOUT1HighTime: 8
CLKOUT1LowTime: 8
CLKOUT1Edge: 0
CLKOUT1NoCount: 0
CLKOUT1DelayTime: 0
CLKOUT2PhaseMux: 0
CLKOUT2HighTime: 14
CLKOUT2LowTime: 14
CLKOUT2Edge: 0
CLKOUT2NoCount: 0
CLKOUT2DelayTime: 0
CLKOUT3PhaseMux: 0
CLKOUT3HighTime: 2
CLKOUT3LowTime: 2
CLKOUT3Edge: 0
CLKOUT3NoCount: 0
CLKOUT3DelayTime: 0
|
# repo: Malcolmnixon/FpgaDemo, file: .github/workflows/vunit.yml
name: CI/VUnit
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: ghdl/setup-ghdl-ci@nightly
with:
backend: llvm
- name: Setup Python
uses: actions/setup-python@v2
- name: Install VUnit
run: python -m pip install vunit_hdl
- name: VUnit Utility
run: python run_vunit.py
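# Local reproduction sketch (assumes GHDL is on PATH, as setup-ghdl-ci provides in CI):
#   python -m pip install vunit_hdl
#   python run_vunit.py --list   # enumerate the discovered test cases
#   python run_vunit.py -v       # compile and run them verbosely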
|
# repo: icgrp/doblink
"7010":
arch: "zynq7"
device_family: "xc7z010"
device_name: "fig1"
device_speed: "clg400-1"
device: "xc7z010-fig1-roi-virt"
board: "zybo"
timeout: 200
|
GitBase: ..
TopRoguePackage: cameralink_gateway
RoguePackages:
- submodules/axi-pcie-core/python
- submodules/clink-gateway-fw-lib/python
- submodules/l2si-core/python
- submodules/lcls2-pgp-fw-lib/python
- submodules/lcls-timing-core/python
- submodules/surf/python
- python
RogueConfig:
- ../software/config
RogueScripts:
- submodules/axi-pcie-core/scripts/updatePcieFpga
- submodules/clink-gateway-fw-lib/scripts/updateFeb
- ../software/scripts/devGui
Targets:
ClinkFeb:
ImageDir: targets/ClinkFeb/images
Extensions:
- mcs
ClinkSlacPgpCardG3Pgp4:
ImageDir: targets/ClinkSlacPgpCardG3Pgp4/images
Extensions:
- mcs
ClinkSlacPgpCardG4Pgp2b:
ImageDir: targets/ClinkSlacPgpCardG4Pgp2b/images
Extensions:
- mcs
ClinkSlacPgpCardG4Pgp4:
ImageDir: targets/ClinkSlacPgpCardG4Pgp4/images
Extensions:
- mcs
ClinkKcu1500Pgp2b:
ImageDir: targets/ClinkKcu1500Pgp2b/images
Extensions:
- mcs
ClinkKcu1500Pgp4:
ImageDir: targets/ClinkKcu1500Pgp4/images
Extensions:
- mcs
Releases:
cameralink_gateway:
Primary: True
Targets:
- ClinkFeb
- ClinkSlacPgpCardG3Pgp4
- ClinkSlacPgpCardG4Pgp2b
- ClinkSlacPgpCardG4Pgp4
- ClinkKcu1500Pgp2b
- ClinkKcu1500Pgp4
Types:
- Rogue
|
# file: .gitlab-ci.yml
firmware:
image:
name: docker.i74.de:5000/gcvideo-build-system
stage: build
before_script:
- source /opt/Xilinx/14.7/ISE_DS/settings64.sh
- export PATH=$PATH:/root/zpugcc/toolchain/install/bin
script:
- HDL/gcvideo_dvi/build-all.sh
artifacts:
paths:
- HDL/gcvideo_dvi/binaries/*.zip
tags:
- xilinx-ise
|
# Human readable task name
name: Atheris
# Long form description.
description: |+
https://pypi.org/project/atheris/
# The flag
flag: CTF{atheris-hispida-is-the-closest-thing-that-exists-to-a-fuzzy-python}
# Task category. (one of hw, crypto, pwn, rev, web, net, misc)
category: pwn
# === the fields below will be filled by SRE or automation ===
# Task label
label: ''
# URL for web challenges
link: ''
# host/port for non-web challenges
host: 'atheris.2021.ctfcompetition.com 1337'
# the URL for attachments, to be filled in by automation
attachment: ''
# is this challenge released? Will be set by SREs
visible: true
|
%YAML 1.2
---
ratio: 0.00473758865248 # size of one map-image pixel in real space (meters)
file_path_information: # paths to the various environment data files
edge_map: "/data/RoutePlanner/edge.png"
line_map: "/data/RoutePlanner/line.png"
paths: "/data/RoutePlanner/paths.yaml"
path_connection: "/data/RoutePlanner/path_connection.yaml"
route: "/data/RoutePlanner/route/mini.yaml"
traffic_light: "/data/RoutePlanner/traffic_light.yaml"
|
<filename>ci/vivado-builds.yml
build:enclustra_ax3_pm3_a35__2018.3:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2018.3"
dependencies:
- quick_check:enclustra_ax3_pm3_a35__2018.3
build:enclustra_ax3_pm3_a35__2019.2:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2019.2"
dependencies:
- quick_check:enclustra_ax3_pm3_a35__2019.2
build:enclustra_ax3_pm3_a50__2018.3:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2018.3"
dependencies:
- quick_check:enclustra_ax3_pm3_a50__2018.3
build:enclustra_ax3_pm3_a50__2019.2:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2019.2"
dependencies:
- quick_check:enclustra_ax3_pm3_a50__2019.2
build:kc705_basex__2018.3:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2018.3"
dependencies:
- quick_check:kc705_basex__2018.3
build:kc705_basex__2019.2:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2019.2"
dependencies:
- quick_check:kc705_basex__2019.2
build:kc705_gmii__2018.3:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2018.3"
dependencies:
- quick_check:kc705_gmii__2018.3
build:kc705_gmii__2019.2:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2019.2"
dependencies:
- quick_check:kc705_gmii__2019.2
build:kcu105_basex__2018.3:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2018.3"
dependencies:
- quick_check:kcu105_basex__2018.3
build:kcu105_basex__2019.2:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2019.2"
dependencies:
- quick_check:kcu105_basex__2019.2
build:zcu102_basex__2018.3:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2018.3"
dependencies:
- quick_check:zcu102_basex__2018.3
build:zcu102_basex__2019.2:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2019.2"
dependencies:
- quick_check:zcu102_basex__2019.2
build:zcu102_c2c_loopback__2019.1:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2019.1"
dependencies:
- quick_check:zcu102_c2c_loopback__2019.1
build:k800__2018.3:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2018.3"
dependencies:
- quick_check:k800__2018.3
build:k800__2019.2:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2019.2"
dependencies:
- quick_check:k800__2019.2
build:vcu118_pcie__2018.3:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2018.3"
dependencies:
- quick_check:vcu118_pcie__2018.3
build:vcu118_pcie__2019.2:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2019.2"
dependencies:
- quick_check:vcu118_pcie__2019.2
build:vcu118_sgmii__2018.3:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2018.3"
dependencies:
- quick_check:vcu118_sgmii__2018.3
build:vcu118_sgmii__2019.2:
extends: .template_vivado_build
variables:
VIVADO_VERSION: "2019.2"
dependencies:
- quick_check:vcu118_sgmii__2019.2
|
image: python:3.7-alpine
run:
script:
- python3 setup.py bdist_wheel
artifacts:
paths:
- dist/*.whl
pages:
stage: deploy
script:
- pip install -U sphinx
- pip install -U sphinx-rtd-theme
- pip install dist/*.whl
- sphinx-build -b html doc/source/ public
artifacts:
paths:
- public
only:
- main
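# The pages job can be reproduced locally (wheel path assumed to exist from the build job):
#   pip install -U sphinx sphinx-rtd-theme dist/*.whl
#   sphinx-build -b html doc/source/ public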
|
# repo: Datum-Technology-Corporation/core-v-mcu
name: Retrieve CI result from GitLab
on:
push:
branches-ignore:
- gh-pages # deployment target branch (this workflow should not exist on that branch anyway)
- v** # such branch names conflict with tags
pull_request:
branches-ignore:
- gh-pages # deployment target branch (this workflow should not exist on that branch anyway)
- v** # such branch names conflict with tags
jobs:
gitlab-ci:
if: github.repository == 'pulp-platform/axi' # do not run this job on forks (because Gitlab CI
runs-on: ubuntu-latest # will not trigger on forks)
timeout-minutes: 190
steps:
- name: Checkout
uses: actions/checkout@v2
with:
persist-credentials: false
# Checkout pull request HEAD commit instead of merge commit, because CI runs against HEAD
# commit.
ref: ${{ github.event.pull_request.head.sha }}
- name: Wait for synchronization (every 5 minutes)
run: |
while [ $(($(date -d "+1 minute" +%-M) % 5)) -ne 0 ]; do
# "+1 minute" because if the current minute is divisible by 5, we likely already missed
# the synchronization.
sleep 10
done
sleep 90 # the minute above plus 30 seconds to leave some time for the synchronization
shell: bash
- name: Obtain CI result
run: |
if [ "$GITHUB_EVENT_NAME" == "pull_request" ]; then
BRANCH_NAME="$GITHUB_HEAD_REF"
elif [ "$GITHUB_EVENT_NAME" == "push" ]; then
if echo $GITHUB_REF | grep -qE '^refs/heads'; then
BRANCH_NAME="$(echo $GITHUB_REF | cut -d '/' -f3-)"
else
echo "Error: Could not derive branch name from ref '$GITHUB_REF'!"
exit 1
fi
else
echo "Error: Unsupported event: '$GITHUB_EVENT_NAME'!"
exit 1
fi
while true; do
resp="$(curl --fail --silent --show-error \
https://akurth.net/usrv/ig/shields/pipeline/akurth/axi/$BRANCH_NAME)"
if [ $? -ne 0 ]; then
echo "Error: Failed to obtain CI status!"
exit 1
fi
status="$(echo $resp | jq -r .message)"
if [ "$status" == "passing" ]; then
sha="$(echo $resp | jq -r .sha)"
if [ "$GITHUB_EVENT_NAME" == "pull_request" ]; then
github_sha="$(cat "$GITHUB_EVENT_PATH" | jq -r .pull_request.head.sha)"
else
github_sha="$GITHUB_SHA"
fi
if [ "$sha" == "$github_sha" ]; then
echo "CI passed."
exit 0
else
echo "Error: CI passed, but on a different SHA: '$sha'!"
exit 1
fi
elif [ "$status" == "running" ]; then
echo "CI is running, waiting .."
else
echo "Error: Unknown or failing status: '$status'!"
exit 1
fi
sleep 10
done
shell: bash
|
# repo: StanfordVLSI/dragonphy2
# Adapted from Garnet
name: constraints
commands:
- python gen_constraints.py
outputs:
- constraints.tcl
parameters:
# Name of the design
design_name: undefined
# Name of the ADK
adk_name: tsmc16
# Period of the main clock in nanoseconds
# (will be scaled by constr_time_scale)
constr_main_per: 0.9
# Scale factor for timing constraints
constr_time_scale: 1.0
# Scale factor for capacitance constraints
constr_cap_scale: 1.0
|
# repo: UNO-NULLify/eCTF20
exclude:
- /getting_started.md
- /package-lock.json
- /Vagrantfile
- /devserver.md
- /LICENSE
- /package.json
- /README.md
- /boot-image/.*
- /sample-audio/.*
- /node_modules/.*
- /vivado-boards/.*
- /docs/.*
- /vagrant/.*
- /pl/.*
- /mb/Cora-Z7-07S/.*
- /mb/drm_audio_fw_bsp/.*
- /mb/README.md
- /mb/system_wrapper.hdf
languages:
- cpp
- python
- script
|
# File auto-generated by Padrick 0.1.0.post0.dev40+g68903cf
package:
name: axi_tlb
authors:
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
dependencies:
register_interface: { git: "https://github.com/pulp-platform/register_interface.git", version: 0.3.1 }
common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.21.0 }
common_verification: { git: "https://github.com/pulp-platform/common_verification.git", version: 0.2.0 }
axi: { git: "https://github.com/pulp-platform/axi.git", version: "0.29.1" }
sources:
- src/axi_tlb_l1.sv
- src/axi_tlb.sv
|
udma_i2c:
files: [
rtl/udma_i2c_reg_if.sv,
rtl/udma_i2c_bus_ctrl.sv,
rtl/udma_i2c_control.sv,
rtl/udma_i2c_top.sv,
]
|
# 100-1000 stars
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
name: test
authors:
- <NAME> <<EMAIL>>
- <NAME> <<EMAIL>>
dependencies:
axi: {path: ../../vendor/pulp_platform_axi}
reqrsp_interface: {path: ../reqrsp_interface}
axi_riscv_atomics: {path: ../../vendor/pulp_platform_axi_riscv_atomics}
sources:
# Level 1:
- target: any(simulation, verilator)
files:
- src/tb_memory_regbus.sv
# Level 2
- src/tb_memory_axi.sv
# Level 3:
- target: test
files:
- src/tb_bin.sv
|
name: Verible formatter
on:
pull_request:
jobs:
format:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- uses: chipsalliance/verible-formatter-action@main
with:
parameters:
--indentation_spaces 4
--module_net_variable_alignment=preserve
--case_items_alignment=preserve
github_token: ${{ secrets.GITHUB_TOKEN }}
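# The same result can be reproduced locally with the Verible CLI (file glob assumed):
#   verible-verilog-format --inplace --indentation_spaces=4 \
#     --module_net_variable_alignment=preserve --case_items_alignment=preserve rtl/*.sv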
|
name: build
on: push
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.7, 3.8, 3.9]
steps:
- name: Checkout
uses: actions/[email protected]
with:
fetch-depth: 0
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements_test.txt
- name: Test
run: |
coverage run -m pytest -k 'not shell'
coverage xml -i
- name: fix code coverage paths
run: |
sed -i 's/\/home\/runner\/work\/sonar\/sonar\//\/github\/workspace\//g' coverage.xml
- name: SonarCloud Scan
uses: SonarSource/[email protected]
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
- name: Codecov
uses: codecov/[email protected]
with:
# Path to coverage file to upload
file: coverage.xml
# Specify whether or not CI build should fail if Codecov runs into an error during upload
fail_ci_if_error: true
|
# 10-100 stars
# Huh this worked
env:
AFAR: boo
# agents: { queue: "papers" }
agents: { queue: "hours" }
steps:
- label: label1
# agents: { queue: "papers" }
commands:
- set -x
- echo hye
- export AFOO=bar
- ABOO=har
- echo AFOO=$$AFOO ABOO=$$ABOO AFAR=$$AFAR
- printenv | sort | head
- wait: { continue_on_failure: true } # One step at a time + continue on failure
- label: label2
commands:
- echo AFOO=$$AFOO ABOO=$$ABOO AFAR=$$AFAR
- printenv | sort | head
|
# file: info.yaml (0 stars)
---
project:
description: "A simple RISC-V core+peripherals subsystem for the Google-sponsored Open MPW shuttles for SKY130."
foundry: "SkyWater"
git_url: "https://github.com/mballance/caravel_fwpayload.git"
organization: "<NAME>"
organization_url: "http://github.com/mballance"
owner: "<NAME>"
process: "SKY130"
project_name: "FWPayload"
tags:
- "Open MPW"
- "FWPayload"
category: "Processor"
top_level_netlist: "verilog/gl/caravel.v"
user_level_netlist: "verilog/gl/user_project_wrapper.v"
version: "1.00"
cover_image: "doc/images/fwpayload_diagram.png"
|
# For Windows based CI
environment:
matrix:
# For Python versions available on Appveyor, see
# http://www.appveyor.com/docs/installed-software#python
# The list here is complete (excluding Python 2.6, which
# isn't covered by this document) at the time of writing.
- BUILD_NAME: py27-unit
PYTHON: "C:\\Python27"
- BUILD_NAME: py33-unit
PYTHON: "C:\\Python33"
- BUILD_NAME: py34-unit
PYTHON: "C:\\Python34"
- BUILD_NAME: py35-unit
PYTHON: "C:\\Python35"
- BUILD_NAME: py36-unit
PYTHON: "C:\\Python36"
- BUILD_NAME: py27-lint
PYTHON: "C:\\Python27"
- BUILD_NAME: py36-lint
PYTHON: "C:\\Python36"
- BUILD_NAME: py27-docs
PYTHON: "C:\\Python27"
- BUILD_NAME: py36-docs
PYTHON: "C:\\Python36"
- BUILD_NAME: py27-acceptance-ghdl
PYTHON: "C:\\Python27"
- BUILD_NAME: py36-acceptance-ghdl
PYTHON: "C:\\Python36"
install:
- "git submodule update --init --recursive"
- "%PYTHON%\\python.exe -m pip install tox"
- "curl -fsSL -o ghdl.zip https://github.com/tgingold/ghdl/releases/download/2017-03-01/ghdl-0.34-dev-mcode-2017-03-01-win32.zip"
- "7z x ghdl.zip -oghdl -y"
- "set PATH=%PATH%;ghdl/bin"
build: off
test_script:
- "%PYTHON%\\python.exe -m tox -e %BUILD_NAME%"
|
---
- hosts: data-generators
gather_facts: False
tasks:
- name: check if we can ping the other servers with correct MTU
command: "~/deployment/event-formation-unit/util/hwcheck/pingcheck.bash 9000"
|
# repo: f110/wing
language: go
sudo: false
go:
- 1.10.x
- 1.11.x
- 1.12.x
- 1.13.x
- 1.14.x
- 1.15.x
cache:
directories:
- $HOME/.cache/go-build
- $HOME/gopath/pkg/mod
env:
global:
- GO111MODULE=on
before_install:
- go get github.com/mattn/goveralls
- go get golang.org/x/tools/cmd/cover
- go get golang.org/x/tools/cmd/goimports
- go get golang.org/x/lint/golint
script:
- gofiles=$(find ./ -name '*.go') && [ -z "$gofiles" ] || unformatted=$(goimports -l $gofiles) && [ -z "$unformatted" ] || (echo >&2 "Go files must be formatted with gofmt. The following files have problems:\n $unformatted" && false)
- golint ./... # This won't break the build, just show warnings
- $HOME/gopath/bin/goveralls -service=travis-ci
|
# repo: icgrp/doblink
# @package _global_
defaults:
- override /do_blink/fig1a@do_blink.fig.fig1a: ["7010", "7020", "at200"]
- override /do_blink/fig2a@do_blink.fig.fig2a: []
- override /do_blink/spam_filter@do_blink.fig.spam_filter: []
- override /do_blink/digit_recognition@do_blink.fig.digit_recognition: []
- override /do_blink/rendering@do_blink.fig.rendering: []
- override /hydra/sweeper: optuna
- override /hydra/launcher: submitit_slurm
hydra:
launcher:
cpus_per_task: 8
mem_per_cpu: 7500mb
nodes: 1
sweeper:
optuna_config:
direction: minimize
study_name: do_blink_fig1a
storage: sqlite:///optuna.db
n_trials: 600
n_jobs: 60
sampler: tpe
seed: 123
search_space:
do_blink.vpr_options.bb_factor:
type: int
low: 3
high: 30
do_blink.vpr_options.acc_fac:
type: float
low: 0.0
high: 0.7
do_blink.vpr_options.astar_fac:
type: float
low: 1.8
high: 30.0
do_blink.vpr_options.initial_pres_fac:
type: float
low: 2.828
high: 1000.0
do_blink.vpr_options.pres_fac_mult:
type: float
low: 10.0
high: 30.0
do_blink.vpr_options.max_criticality:
type: float
low: 0.0
high: 0.999
do_blink.vpr_options.target_ext_pin_util.input:
type: float
low: 0.0
high: 1.0
do_blink.vpr_options.target_ext_pin_util.output:
type: float
low: 0.0
high: 1.0
do_blink.vpr_options.place_algorithm:
type: categorical
choices:
- bounding_box
- criticality_timing
- slack_timing
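# A sweep like this runs when the Hydra app is launched in multirun mode
# (entry-point name assumed); trials are recorded in the optuna.db store named above:
#   python run.py -m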
|
name: RAIDers of corruption
# Long form description.
description: |+
Picked up these at a yardsale, there doesn't seem to be anything useful in there though!
# The flag
flag: CTF{I_g0t_Str1p3s}
# Task category. (one of hw, crypto, pwn, rev, web, net, misc)
category: misc
# === the fields below will be filled by SRE or automation ===
# Task label
label: ''
# URL for web challenges
link: ''
# host/port for non-web challenges
host: ''
# the URL for attachments, to be filled in by automation
attachment: ''
# is this challenge released? Will be set by SREs
visible: false
|
PytorchModel: example-models/2LayerModel.plk
PytorchDict: example-models/2LayerDict.plk
OutputDir: my-hls-dir-test
ProjectName: myproject
XilinxPart: xc7vx690tffg1927-2
ClockPeriod: 5
IOType: io_parallel # options: io_serial/io_parallel
ReuseFactor: 1
DefaultPrecision: ap_fixed<18,8>
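# This matches the config format of early hls4ml PyTorch flows; under that assumption
# it would be consumed by the converter script, roughly (script and file names assumed):
#   python pytorch-to-hls.py -c this-config.yml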
|
# repo: BearerPipelineTest/google-ctf
apiVersion: kctf.dev/v1
kind: Challenge
metadata:
name: pwn
spec:
deployed: true
powDifficultySeconds: 0
network:
public: false
healthcheck:
# TIP: disable the healthcheck during development
enabled: true
horizontalPodAutoscalerSpec:
maxReplicas: 10
minReplicas: 2
targetCPUUtilizationPercentage: 80
podTemplate:
template:
spec:
containers:
- name: 'challenge'
resources:
requests:
memory: "2048Mi"
cpu: "1000m"
|
name: vitis-ai-neptune
channels:
- file:///scratch/conda-channel
- pytorch
- defaults
- conda-forge/label/gcc7
- conda-forge
dependencies:
- python=3.6
- xip
- dpuv1-rt-neptune
|
name: dragonphy
outputs:
- dragonphy_top_tt.lib
- dragonphy_top_tt.db
- dragonphy_top.lef
- dragonphy_top.gds
- dragonphy_top.spi
- dragonphy_RDL.gds
- dragonphy_RDL.lef
commands:
- bash fixup.sh
|
# repo: semify-eda/waveform-generator (1-10 stars)
name: Continuous Integration
on: [push]
jobs:
unit-tests:
runs-on: ubuntu-latest
timeout-minutes: 10
steps:
- uses: actions/checkout@main
- name: Compile and install iverilog
run: |
sudo apt install -y gperf
git clone https://github.com/steveicarus/iverilog.git
cd iverilog
sh autoconf.sh
./configure
make -j$(nproc)
sudo make install
- name: Install cocotb and modules
run: |
cd ~/work/waveform-generator/waveform-generator/
pip3 install cocotb
pip3 install cocotbext-wishbone
pip3 install cocotbext-axi
pip3 install cocotbext-spi
pip3 install matplotlib
pip3 install scipy
pip3 install numpy
- name: Run tests
run: |
cd ~/work/waveform-generator/waveform-generator/
make templates
make unit-tests
- name: Report failures
run: |
! grep failure design/*/sim/results.xml
|
# 0 stars
language: cpp
sudo: required
services:
- docker
matrix:
include:
- env: BUILD_TYPE=gcc CONFIG_FLAGS="-DCMAKE_CXX_COMPILER=g++-8"
- env: BUILD_TYPE=clang CONFIG_FLAGS="-DCMAKE_CXX_COMPILER=clang++-6.0 -DSLANG_COVERAGE=ON -DSLANG_SANITIZERS=undefined,address"
before_install:
- docker pull mpopoloski/slang:latest
- docker run -itd --name build mpopoloski/slang:latest
- docker exec build git clone https://github.com/MikePopoloski/slang.git
install:
- docker exec build /bin/sh -c "cd slang && mkdir build && cd build && cmake $CONFIG_FLAGS .."
script:
- export ci_env=`bash <(curl -s https://codecov.io/env)`
- if [ "$BUILD_TYPE" == "clang" ]; then docker exec $ci_env build slang/scripts/build_clang.sh; fi
- if [ "$BUILD_TYPE" == "gcc" ]; then docker exec $ci_env build slang/scripts/build_gcc.sh; fi
branches:
only:
- /.*/
git:
depth: 1
|
# file: .travis.yml
dist: xenial
language: python
python:
- "3.4"
- "3.5"
- "3.6"
- "3.7"
# - "pypy3"
install: pip -q install tox codacy-coverage codecov
script:
- tox -e cover
after_success:
- python-codacy-coverage -r coverage.xml
- codecov
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: inception-v4 classifier on ImageNet.
input size: 299*299
float ops: 24.5G
task: classification
framework: caffe
prune: 'no'
version: 1.4
files:
- name: cf_inceptionv4_imagenet_299_299_24.5G_1.4
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=cf_inceptionv4_imagenet_299_299_24.5G_1.4.zip
checksum: 8a71218bf557af79d93f6e1346c03aad
- name: inception_v4
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_v4-zcu102_zcu104_kv260-r1.4.1.tar.gz
checksum: 57e84965d954435c6e582d7826f08d30
- name: inception_v4
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_v4-vck190-r1.4.1.tar.gz
checksum: 87562a0842fe2806867caaab5e8a0396
- name: inception_v4
type: xmodel
board: u50-DPUCAHX8H & u50lv-DPUCAHX8H & u280-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_v4-u50-u50lv-u280-DPUCAHX8H-r1.4.1.tar.gz
checksum: db2c16d6e3a56046f74645c8548d1a67
- name: inception_v4
type: xmodel
board: u50-DPUCAHX8L & u50lv-DPUCAHX8L & u280-DPUCAHX8L
download link: https://www.xilinx.com/bin/public/openDownload?filename=inception_v4-u50-u50lv-u280-DPUCAHX8L-r1.4.1.tar.gz
checksum: 94cc30cd6b9b731b6de5c40af7859cb9
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
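# The checksum fields are MD5 sums, so a download can be verified like this
# (local file name assumed):
#   wget -O model.zip 'https://www.xilinx.com/bin/public/openDownload?filename=cf_inceptionv4_imagenet_299_299_24.5G_1.4.zip'
#   echo '8a71218bf557af79d93f6e1346c03aad  model.zip' | md5sum -c -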
|
name: lint-editorconfig
on: [push, pull_request]
jobs:
editorconfig:
name: Editorconfig
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.x
- name: Install requirements
run: pip install editorconfig-checker
- name: Run editorconfig checker
run: |
ec
|
# file: ikondemo/actions/datagen_stop_multiblade.yml
---
- hosts: data-generators
gather_facts: False
tasks:
- name: stop data generators
command: "killall datagen_multiblade.bash udpgen_pcap"
ignore_errors: True
tags:
- multiblade
|
ebpc_encoder:
files: [
src/ebpc_pkg.sv,
src/fifo_slice.sv,
src/encoder/bpc_buffer.sv,
src/encoder/bpc_encoder.sv,
src/encoder/dbx_compressor.sv,
src/encoder/ebpc_encoder.sv,
src/encoder/seq_coder.sv,
src/encoder/shift_streamer.sv,
src/encoder/zrle.sv,
src/encoder/dbp_dbx_enc.sv,
]
ebpc_decoder:
files: [
src/ebpc_pkg.sv,
src/fifo_slice.sv,
src/decoder/bpc_decoder.sv,
src/decoder/buffer.sv,
src/decoder/ebpc_decoder.sv,
src/decoder/expander.sv,
src/decoder/symbol_decoder.sv,
src/decoder/unpacker.sv,
src/decoder/zrle_decoder.sv,
src/decoder/delta_reverse.sv,
]
|
apiVersion: apps/v1
kind: DaemonSet
metadata:
name: ctf-daemon
namespace: kube-system
labels:
k8s-app: ctf-daemon
spec:
selector:
matchLabels:
name: ctf-daemon
template:
metadata:
labels:
name: ctf-daemon
spec:
tolerations:
- key: node-role.kubernetes.io/master
effect: NoSchedule
containers:
- name: ctf-daemon
image: google/apparmor-loader:latest
securityContext:
privileged: true
command: ["sh", "-c", "while true; do for f in /profiles/*; do echo \"loading $f\"; apparmor_parser -r $f; sleep 30; done; done"]
volumeMounts:
- name: sys
mountPath: /sys
readOnly: true
- name: apparmor-includes
mountPath: /etc/apparmor.d
readOnly: true
- name: profiles
mountPath: /profiles
readOnly: true
volumes:
- name: sys
hostPath:
path: /sys
- name: apparmor-includes
hostPath:
path: /etc/apparmor.d
- name: profiles
configMap:
name: apparmor-profiles
|
title: ASIC Design
description: Introduction To ASIC Design
google_analytics: UA-93559920-1
gems:
- jekyll-sitemap
- jekyll-seo-tag
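# Note: gems: is the pre-3.5 Jekyll spelling; current Jekyll lists the same plugins
# under a plugins: key instead:
#   plugins:
#     - jekyll-sitemap
#     - jekyll-seo-tag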
|
# Human readable task name
name: PCIVault 2
# Long form description.
description: |+
See PCIVault 1 for the description and the challenge.
Enter the second flag here.
# The flag
flag: CTF{B3h1nD_S3vEn_D4v1C3s}
# Task category. (one of hardware, crypto, pwn, reversing, web, net, misc)
category: hardware
# === the fields below will be filled by SRE or automation ===
# Task label
label: ''
# URL for web challenges
link: ''
# host/port for non-web challenges
host: 'pcivault.2021.ctfcompetition.com 1337'
# the URL for attachments, to be filled in by automation
attachment: ''
# is this challenge released? Will be set by SREs
visible: false
|
# file: software/yml/cryo_config_SCOPE.yml
cryoAsicGen1:
enable: True
ForceWrite: False
EpixHRGen1Cryo:
enable: True
Oscilloscope:
enable: True
ArmReg: False
TrigReg: False
ScopeEnable: False
TriggerEdge: Falling
TriggerChannel: AcqStart
TriggerMode: Always
TriggerAdcThresh: 0
TriggerHoldoff: 0
TriggerOffset: 0
TraceLength: 2000
SkipSamples: 0
InputChannelA: Asic0TpsMux
InputChannelB: Asic0TpsMux
TriggerDelay: 0
|
event_unit_flex:
incdirs: [
./rtl,
]
files: [
./rtl/event_unit_core.sv,
./rtl/hw_barrier_unit.sv,
./rtl/hw_dispatch.sv,
./rtl/hw_mutex_unit.sv,
./rtl/interc_sw_evt_trig.sv,
./rtl/periph_FIFO_id.sv,
./rtl/soc_periph_fifo.sv,
./rtl/event_unit_interface_mux.sv,
./rtl/event_unit_top.sv
]
|
# repo: benreynwar/axilent, file: .travis.yml (1-10 stars)
language: python
python:
- "3.7"
os: linux
sudo: required
dist: bionic
before_install:
- sudo apt-get update -qq
- sudo apt-get install -y gnat zlib1g-dev
before_script:
- pip install fusesoc
- pip install -e .
- git clone --depth 1 https://github.com/ghdl/ghdl.git ghdl
- cd ghdl
- mkdir build
- cd build
- ../configure --prefix=../../install-ghdl/
- make
- make install
- cd ../../
- export PATH=$PATH:`pwd`/install-ghdl/bin/
- pip install cocotb
script:
- pytest tests
- cd tests; make
|
# 1-10 stars
# USBpix board
USB:
bit_file : "firmware/mio.bit" # Selecting FPGA firmware
board_id : # Selecting USBpix board by ID
avoid_download : True # Avoiding download of FPGA firmware if already initialized
SINGLE_CHIP_ADAPTER_CARD:
no_calibration : False
#QUAD_MODULE_ADAPTER_CARD:
# no_calibration : False
# Trigger
TRIGGER_CH1_TO_CH4:
TRIGGER_MODE : 0 # Selecting trigger mode: Use trigger inputs/trigger select (0), TLU no handshake (1), TLU simple handshake (2), TLU data handshake (3)
TRIGGER_SELECT : 0 # Selecting trigger input: RX2 (TDC loop-through) (8), RX1 (4), RX0 (2), MonHit/HitOR from Adapter Card (1), disabled (0)
TRIGGER_INVERT : 0 # Inverting trigger input: RX2 (TDC loop-through) (8), RX1 (4), RX0 (2), MonHit/HitOR from Adapter Card (1), disabled (0)
TRIGGER_VETO_SELECT : 1 # Selecting trigger veto: RX1 (2), RX FIFO full (1), disabled (0)
TRIGGER_HANDSHAKE_ACCEPT_WAIT_CYCLES : 3 # Minimum TLU trigger length (TLU data handshake mode) required for accepting the trigger (preventing certain EUDAQ TLU firmware flaws)
TRIGGER_DATA_DELAY : 8 # Depends on the cable length and should be adjusted (run scan/tune_tlu.py)
TRIGGER_THRESHOLD : 0 # Minimum trigger length (standard trigger and TLU no handshake mode) required for accepting the trigger
DATA_FORMAT : 0 # 31bit trigger number (0), 31bit time stamp (1), combined (15bit time stamp + 16bit trigger number) (2)
# TDC for high precision charge measurements
TDC_RX2:
EN_WRITE_TIMESTAMP : 0 # Writing trigger timestamp
EN_TRIGGER_DIST : 0 # Measuring trigger to TDC delay with 640MHz clock
EN_NO_WRITE_TRIG_ERR : 0 # Writing TDC word only if valid trigger occurred
EN_INVERT_TDC : 0 # Inverting TDC input
EN_INVERT_TRIGGER : 0 # Inverting trigger input
# FE-I4 command output
CMD_CH1_TO_CH4:
OUTPUT_MODE : 0 # Selecting command output mode: positive edge (0), negative edge (1), Manchester Code according to IEEE 802.3 (2), Manchester Code according to G.E. Thomas (3)
# FE-I4 data receiver
DATA_CH4:
INVERT_RX : 0 # Inverting data input: disabled (0), enabled (e.g. for DBM modules) (1)
DATA_CH3:
INVERT_RX : 0
DATA_CH2:
INVERT_RX : 0
DATA_CH1:
INVERT_RX : 0
|
name: oxide_ci
on: [push, pull_request]
jobs:
install-use-oxide-conda:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: '3.7'
- run: bash scripts/setup -ci
- run: which pip3 && which python3 && which pip
- run: make env
- run: which pip3 && which python3 && which pip
- run: pwd && source env/conda/bin/activate cfu-common && source environment && riscv32-elf-newlib-gcc --version
- run: pwd && source env/conda/bin/activate cfu-common && source environment && yosys --version && nextpnr-nexus --version
- run: pwd && source env/conda/bin/activate cfu-common && source environment && cd proj/proj_template_v && pip3 list && make PLATFORM=hps bitstream
- run: pwd && source env/conda/bin/activate cfu-common && source environment && ulimit -S -t 900 && ulimit -H -t 900 && cd proj/hps_accel && pip3 list && make PLATFORM=hps bitstream || true
- run: pwd && source env/conda/bin/activate cfu-common && source environment && ulimit -S -t 900 && ulimit -H -t 900 && cd proj/hps_accel && pip3 list && make PLATFORM=hps EXTRA_LITEX_ARGS="--cpu-variant=slimopt+cfu" clean bitstream || true
- run: pwd && source env/conda/bin/activate cfu-common && source environment && ulimit -S -t 900 && ulimit -H -t 900 && cd proj/hps_accel && make PLATFORM=hps EXTRA_NEXTPNR_ARGS="--placer-heap-timingweight 52" EXTRA_LITEX_ARGS="--separate-arena --cpu-variant=slimopt+cfu --extra-nextpnr-params" clean bitstream || true
- run: pwd && source env/conda/bin/activate cfu-common && source environment && ulimit -S -t 240 && ulimit -H -t 240 && cd proj/hps_accel && make PLATFORM=hps EXTRA_NEXTPNR_ARGS="--placer-heap-timingweight 52" EXTRA_LITEX_ARGS="--separate-arena --cpu-variant=slimopt+cfu --extra-nextpnr-params --no-compile-software" clean bitstream || true
- run: pwd && source env/conda/bin/activate cfu-common && source environment && cd proj/mport && pip3 list && make PLATFORM=hps bitstream software
|
language: c
cache: ccache
sudo: true
dist: trusty
services:
- docker
before_install:
- docker pull librecores/librecores-ci-openrisc
- docker images
script:
- docker run --rm -v $(pwd):/src -e "JOB=$JOB" -e "SIM=$SIM" -e "PIPELINE=$PIPELINE" -e "EXPECTED_FAILURES=$EXPECTED_FAILURES" -e "EXTRA_CORE_ARGS=$EXTRA_CORE_ARGS" librecores/librecores-ci-openrisc /src/.travis/test.sh
matrix:
fast_finish: true
jobs:
allow_failures:
- env: JOB=or1k-tests SIM=icarus PIPELINE=ESPRESSO
include:
- stage: verilator
env: JOB=verilator
- stage: testing
env: JOB=or1k-tests SIM=icarus PIPELINE=CAPPUCCINO EXPECTED_FAILURES="or1k-cy"
- stage: testing
env: JOB=or1k-tests SIM=icarus PIPELINE=CAPPUCCINO EXPECTED_FAILURES="or1k-cy" EXTRA_CORE_ARGS="--feature_dmmu NONE"
- stage: testing
env: JOB=or1k-tests SIM=icarus PIPELINE=CAPPUCCINO EXPECTED_FAILURES="or1k-cy or1k-dsxinsn" EXTRA_CORE_ARGS="--feature_immu NONE"
- stage: testing
env: JOB=or1k-tests SIM=icarus PIPELINE=CAPPUCCINO EXPECTED_FAILURES="or1k-cy" EXTRA_CORE_ARGS="--feature_datacache NONE"
- stage: testing
env: JOB=or1k-tests SIM=icarus PIPELINE=CAPPUCCINO EXPECTED_FAILURES="or1k-cy" EXTRA_CORE_ARGS="--feature_instructioncache NONE"
- stage: testing
env: JOB=or1k-tests SIM=icarus PIPELINE=CAPPUCCINO EXPECTED_FAILURES="or1k-cy" EXTRA_CORE_ARGS="--feature_debugunit NONE"
- stage: testing
env: JOB=or1k-tests SIM=icarus PIPELINE=CAPPUCCINO EXPECTED_FAILURES="or1k-cy or1k-cmov" EXTRA_CORE_ARGS="--feature_cmov NONE"
- stage: testing
env: JOB=or1k-tests SIM=icarus PIPELINE=CAPPUCCINO EXPECTED_FAILURES="or1k-cy or1k-ext" EXTRA_CORE_ARGS="--feature_ext NONE"
- stage: testing
env: JOB=or1k-tests SIM=icarus PIPELINE=ESPRESSO
|
# you describe your data using the YAML notation here
# and then load them using Fixtures.load("data.yml")
# User(bob):
# email: <EMAIL>
# password: <PASSWORD>
# fullname: Bob
User(joel):
email: <EMAIL>
password: <PASSWORD>
fullname: Joel
|
# 1-10 stars
- Global:
Print : true
- Library:
Name : zynqmp_acp_adapter_library
Format : "add_vhdl_file sources_1 #{library_name} #{file_name}"
PathList :
- "../../../src/main/vhdl/"
- "../../../PipeWork/src/components/reducer.vhd"
- "../../../PipeWork/src/components/queue_register.vhd"
- "../../../PipeWork/src/components/queue_receiver.vhd"
- "../../../PipeWork/src/components/sdpram.vhd"
- "../../../PipeWork/src/components/sdpram_xilinx_ultrascale_auto_select.vhd"
Top : ["ZYNQMP_ACP_ADAPTER"]
|
# 10-100 stars
apb_adv_timer:
incdirs: [
.,
]
files: [
adv_timer_apb_if.sv,
comparator.sv,
lut_4x4.sv,
out_filter.sv,
up_down_counter.sv,
input_stage.sv,
prescaler.sv,
apb_adv_timer.sv,
timer_cntrl.sv,
timer_module.sv,
]
|
# file: .buildkite/pipelines/mem_tile_only.yml
agents: { jobsize: "hours" }
env:
GOLD: /build/mem_tile_only.${BUILDKITE_BUILD_NUMBER}
OVERRIDE_MFLOWGEN_BRANCH: silent_fail
steps:
##############################################################################
# INDIVIDUAL TILE RUNS - Mem tile only
- label: 'setup'
commands:
- 'source mflowgen/bin/setup-buildkite.sh --dir $$GOLD;
mflowgen run --design $$GARNET_HOME/mflowgen/tile_array'
- wait: ~
- label: 'MemTile'
commands:
- 'source mflowgen/bin/setup-buildkite.sh --dir $$GOLD --need_space 30G;
set -o pipefail;
make Tile_MemCore |& tee make-mem.log'
|
axi_mem_if_DP:
files: [
axi_mem_if_MP_Hybrid_multi_bank.sv,
axi_mem_if_multi_bank.sv,
axi_mem_if_DP_hybr.sv,
axi_mem_if_DP.sv,
axi_mem_if_SP.sv,
axi_read_only_ctrl.sv,
axi_write_only_ctrl.sv,
]
|
# repo: ManuelG28/pulpino, file: ips/adv_dbg_if/src_files.yml
adv_dbg_if:
incdirs: [
rtl,
]
files: [
rtl/adbg_axi_biu.sv,
rtl/adbg_axi_module.sv,
rtl/adbg_lint_biu.sv,
rtl/adbg_lint_module.sv,
rtl/adbg_crc32.v,
rtl/adbg_or1k_biu.sv,
rtl/adbg_or1k_module.sv,
rtl/adbg_or1k_status_reg.sv,
rtl/adbg_top.sv,
rtl/bytefifo.v,
rtl/syncflop.v,
rtl/syncreg.v,
rtl/adbg_tap_top.v,
rtl/adv_dbg_if.sv,
rtl/adbg_axionly_top.sv,
rtl/adbg_lintonly_top.sv,
]
|
package:
name: udma_external_per
authors:
- "<NAME> <<EMAIL>>"
dependencies:
udma_core: { git: "<EMAIL>:pulp-platform/udma_core.git", version: 1.0.1 }
sources:
# Source files grouped in levels. Files in level 0 have no dependencies on files in this
# package. Files in level 1 only depend on files in level 0, files in level 2 on files in
# levels 1 and 0, etc. Files within a level are ordered alphabetically.
# Level 0
- rtl/udma_external_per_reg_if.sv
- rtl/udma_traffic_gen_rx.sv
- rtl/udma_traffic_gen_tx.sv
# Level 1
- rtl/udma_external_per_top.sv
# Level 2
- rtl/udma_external_per_wrapper.sv
|
# file: src_files.yml (1-10 stars)
apb_uart_sv:
files: [
apb_uart_sv.sv,
uart_rx.sv,
uart_tx.sv,
io_generic_fifo.sv,
uart_interrupt.sv,
]
|
# repo: nguyenhuydong1998/osd-hw
module: test_regaccess
sources:
- ../../../interfaces/common/dii_channel.sv
- ../common/osd_regaccess.sv
toplevel: osd_regaccess
simulators:
- vcs
parameters:
MOD_VENDOR: 1
MOD_TYPE: 2
MOD_VERSION: 3
MOD_EVENT_DEST_DEFAULT: 4
CAN_STALL: 1
MAX_REG_SIZE: 128
|
---
# Firmware folder relative to repository root
firmwareFolder: firmware/fox_hoplite_multicast
hdlFolder: hdl/fox_hoplite_multicast
|
# 1-10 stars
linters-settings:
golint:
min-confidence: 0.3
gocyclo:
min-complexity: 28
goimports:
local-prefixes: github.com
linters:
enable:
- deadcode
- dupl
- errcheck
- gocritic
- gocyclo
- goimports
- golint
- gosimple
- govet
- ineffassign
- megacheck
- nakedret
- scopelint
- staticcheck
- structcheck
- unused
- varcheck
disable-all: true
|
name: lint-sv
on: [push, pull_request]
env:
VERIBLE_VERSION: 0.0-1051-gd4cd328
jobs:
system_verilog:
name: System Verilog Sources
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
- name: Install Verible
run: |
set -e
mkdir -p build/verible
cd build/verible
curl -Ls -o verible.tar.gz https://github.com/google/verible/releases/download/v$VERIBLE_VERSION/verible-v$VERIBLE_VERSION-Ubuntu-20.04-focal-x86_64.tar.gz
sudo mkdir -p /tools/verible && sudo chmod 777 /tools/verible
tar -C /tools/verible -xf verible.tar.gz --strip-components=1
echo "PATH=$PATH:/tools/verible/bin" >> $GITHUB_ENV
#https://github.com/actions/toolkit/blob/master/docs/problem-matchers.md#problem-matchers
- name: Run Lint Verible
run: |
echo "::add-matcher::.github/verible-lint-matcher.json"
utils/run_verible.sh
echo "::remove-matcher owner=verible-lint-matcher::"
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Sajjad "JJ" Arshad (sajjadium)
apiVersion: kctf.dev/v1
kind: Challenge
metadata:
name: tridroid
spec:
deployed: true
powDifficultySeconds: 60
network:
public: true
healthcheck:
enabled: true
podTemplate:
template:
spec:
containers:
- name: challenge
volumeMounts:
- name: kvm-volume
mountPath: /dev/kvm
securityContext:
privileged: true
volumes:
- name: kvm-volume
hostPath:
path: /dev/kvm
nodeSelector:
dedicated: kvm
tolerations:
- key: "dedicated"
operator: "Equal"
value: "kvm"
effect: "NoExecute"
|
name: build website
on:
push:
branches:
- main
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Node
uses: actions/setup-node@v2
with:
node-version: "12"
- name: Build
run: |
cd website
yarn install && yarn build
# - name: GitHub Pages action
# uses: peaceiris/[email protected]
# with:
# github_token: ${{ secrets.GITHUB_TOKEN }}
# publish_dir: ./reconfig/build
|
udma_i2s:
files: [
rtl/i2s_clk_gen.sv,
rtl/i2s_rx_channel.sv,
rtl/i2s_ws_gen.sv,
rtl/cic_top.sv,
rtl/cic_integrator.sv,
rtl/cic_comb.sv,
rtl/udma_i2s_multich.sv,
rtl/udma_i2s_2ch.sv,
rtl/udma_i2s_reg_if.sv,
]
|
- import_playbook: integration_test_env_check.yml
- import_playbook: integration_test_start_up.yml
- import_playbook: integration_test_commands.yml
- import_playbook: integration_test_end.yml
- import_playbook: integration_test_checks.yml
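# This file is itself a playbook composed of imports and runs as one unit
# (inventory and file name assumed):
#   ansible-playbook -i inventory integration_test.yml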
|
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
name: mem_interface
authors:
- <NAME> <<EMAIL>>
dependencies:
reqrsp_interface: {path: ../reqrsp_interface}
export_include_dirs:
- include
sources:
- src/mem_wide_narrow_mux.sv
- src/mem_interface.sv
- target: simulation
files:
- src/mem_test.sv
- target: test
files:
# Level 0
- test/mem_wide_narrow_mux_tb.sv
|
name: ci
on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: YosysHQ/setup-oss-cad-suite@v1
- name: Run checks
run: tabbypip install xmlschema && make ci
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: FPN(ResNet18) for segmentation on Cityscapes.
input size: 256*512
float ops: 10G
task: segmentation
framework: pytorch
prune: 'no'
version: 2.0
files:
- name: pt_SemanticFPN_cityscapes_256_512_10G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_SemanticFPN_cityscapes_256_512_10G_2.0.zip
checksum: 1843665f70522e2453e3534f615d47ee
- name: SemanticFPN_cityscapes_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=SemanticFPN_cityscapes_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: 480f2edac8fa3063d60746efac9f13b0
- name: SemanticFPN_cityscapes_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=SemanticFPN_cityscapes_pt-vck190-r2.0.0.tar.gz
checksum: 99e3c1105918e751cf4c867ecc47f8db
- name: SemanticFPN_cityscapes_pt
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=SemanticFPN_cityscapes_pt-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: f8e5ba175dc8803ca872fcad36537945
- name: SemanticFPN_cityscapes_pt
type: xmodel
board: vck50008pe-DPUCVDX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=SemanticFPN_cityscapes_pt-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz
checksum: cc793a21bbb3efe86fe135a6f3bcae23
- name: SemanticFPN_cityscapes_pt
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=SemanticFPN_cityscapes_pt-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: 8ddfbf92649543d51e9c7f8da0ca842f
- name: SemanticFPN_cityscapes_pt
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=SemanticFPN_cityscapes_pt-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: d156fd1d262ecbf3cf623d621e7b2dea
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
options:
sim_args:
usage:
simulation args pass to simulator
with_value_action:
sim_option:
- $sim_args
compile_args:
usage:
compiling args pass to simulator
with_value_action:
compile_option:
- $compile_args
|
# file: app/Tlut/configs/arch/arch_tlut_systolic_projection_bank8_block32.yml
# This file defines single architecture set for tlut systolic array performance projection
- proj_16_16_bank8_block32
- proj_32_32_bank8_block32
- proj_64_64_bank8_block32
- proj_128_128_bank8_block32
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: PointPainting on nuScenes
task: Sensor-fusion with 2d semantic segmentation and 3d detection
framework: pytorch
prune: 'no'
version: 2.0
part 1: PointPillars of PointPainting
task 1: 3d detection for sensor fusion
input size 1: 40000*64*16
float ops 1: 112G
part 2: SemanticFPN of PointPainting
task 2: 2d semantic segmentation for sensor fusion
input size 2: 320*576*3
float ops 2: 14G
files:
- name: pt_pointpainting_nuscenes_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_pointpainting_nuscenes_2.0.zip
checksum: 03ea43a65ac4b14ace67ad0dc3736c48
- name: pointpainting_nuscenes_40000_64_0_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: download link
checksum: md5sum value
- name: pointpainting_nuscenes_40000_64_1_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=pointpainting_nuscenes_40000_64_1_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: f5521f8aef64a840e9fb1e27d3edb1e2
- name: semanticfpn_nuimage_576_320_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=semanticfpn_nuimage_576_320_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: 1c60952aabb0220d66f45262c3dd90de
- name: pointpainting_nuscenes_40000_64_0_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=pointpainting_nuscenes_40000_64_0_pt-vck190-r2.0.0.tar.gz
checksum: fae76b18c213dd4c8750e36ff7383028
- name: pointpainting_nuscenes_40000_64_1_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=pointpainting_nuscenes_40000_64_1_pt-vck190-r2.0.0.tar.gz
checksum: 19a27526f393bed949c454373d296422
- name: semanticfpn_nuimage_576_320_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=semanticfpn_nuimage_576_320_pt-vck190-r2.0.0.tar.gz
checksum: 255abd95d11df4118a9342b86bb78c4b
license: license link
|
# 100-1000 stars
apb_uart:
files: [
src/apb_uart.vhd,
src/slib_clock_div.vhd,
src/slib_counter.vhd,
src/slib_edge_detect.vhd,
src/slib_fifo.vhd,
src/slib_input_filter.vhd,
src/slib_input_sync.vhd,
src/slib_mv_filter.vhd,
src/uart_baudgen.vhd,
src/uart_interrupt.vhd,
src/uart_receiver.vhd,
src/uart_transmitter.vhd,
]
|
# Build configure for https://www.tea-ci.org (fork of Drone CI with Msys2 support)
# 32-bit until a release including https://sourceware.org/git/gitweb.cgi?p=newlib-cygwin.git;a=commitdiff;h=0aa738220bb9dea2ad479e484560767b36701947 is available
build:
image: teaci/msys32
shell: mingw32
pull: true
branches:
- master
environment:
- SUBTARGET=tiny
- MINGW32=/c/msys32/mingw32
- MAME_EXE=mametiny.exe
- IGNORE_GIT=1
- TOOLS=1
# - SOURCES=src/mame/drivers/pacman.cpp
commands:
# - pacman -S -q --noprogressbar --noconfirm winpty
- make
- ./$MAME_EXE -validate && echo "Validation successful"
notify:
irc:
prefix: build
nick: $$IRC_USER
channel: mame-dev
server:
host: $$IRC_HOST
port: $$IRC_PORT
password: <PASSWORD>
tls: true
when:
success: false
failure: true
change: true
# Need to regen secrets file (.drone.sec) from within tea-ci.org to enable
#notify:
# email:
# from: <EMAIL>
# host: $$EMAIL_SERVER
# username: $$EMAIL_USERNAME
# password: <PASSWORD>
# recipients:
# - $$EMAIL_LISTADDR
# when:
# success: false
# failure: false
# change: true
|
# Copyright 2020 by the project contributors
# SPDX-License-Identifier: GPL-3.0-only
#
# Author(s): <NAME> <<EMAIL>>
version: v1.0
name: Ubuntu18 Based Pipeline
agent:
machine:
type: e1-standard-2 # Linux machine type with 2 vCPUs, 4 GB of RAM
os_image: ubuntu1804 # The Ubuntu 18.04 OS image.
fail_fast:
stop:
when: "true" # enable strategy for branches, tags, and pull-requests
blocks:
- name: "Build & Test"
task:
jobs:
- name: Build & Test the BCPU
commands:
# Prepare the machine for a build
- sudo fallocate -l 3G /swapfile
- sudo chmod 600 /swapfile
- sudo mkswap /swapfile
- sudo swapon /swapfile
# Translate the project
- checkout
- git clone https://github.com/benycze/bsc-docker-container.git
- cd bsc-docker-container/scripts && bash bootstrap.sh && cd .. && docker build --build-arg USER=$USER --build-arg UID=`id -u` --build-arg GID=`id -g` -t localhost/bsc-compiler . && cd .. # GID build-arg name assumed; the original line passed `id -g` without an argument name
- cd bsv/ && ./start-container.sh -s
|
# 0 stars
language: c
before_install:
- sudo apt-get install -yy qemu-system-mips
- curl -sc /tmp/gcokie 'https://drive.google.com/uc?export=download&id=1Mo5hGcCg1zm8Ca0KQB-G5PNI3fXfAAmR'
- getcode="$(awk '/_warning_/ {print $NF}' /tmp/gcokie)"
- curl -s -C - -LOJb /tmp/gcokie "https://drive.google.com/uc?export=download&confirm=${getcode}&id=1Mo5hGcCg1zm8Ca0KQB-G5PNI3fXfAAmR"
- tar -xf Codescape.GNU.Tools.Package.2016.05-06.for.MIPS.MTI.Bare.Metal.CentOS-5.x86_64.tar.gz
script:
- make CROSS_COMPILE=$PWD/mips-mti-elf/2016.05-06/bin/mips-mti-elf- ON_FPGA=y -j4
- make CROSS_COMPILE=$PWD/mips-mti-elf/2016.05-06/bin/mips-mti-elf- ON_FPGA=y clean
- make CROSS_COMPILE=$PWD/mips-mti-elf/2016.05-06/bin/mips-mti-elf- ON_FPGA=n -j4
- echo "Running Tests"
- timeout 5s qemu-system-mipsel -M mipssim -m 32M -nographic -kernel obj/ucore-kernel-initrd -monitor none -serial file:test.log || true
- cat test.log
- grep 'user sh is running!!!' test.log
addons:
apt:
update: true
|
#
# List of RTL sources. Contrarily to IPs, these reside in
# the current Git repository.
# Uses the YAML syntax.
# 'domain' refers to the two soc,cluster domains for FPGA
# emulator synthesis
#
tb:
path: tb
domain: [soc]
vip:
path: vip
domain: [soc]
pulpemu:
path: pulpemu
domain: [soc]
pulp:
path: pulp
domain: [soc]
|
# 1-10 stars
axi:
files:
- src/axi_pkg.sv
- src/axi_intf.sv
- src/axi_atop_filter.sv
- src/axi_cut.sv
- src/axi_delayer.sv
- src/axi_join.sv
- src/axi_lite_join.sv
- src/axi_lite_to_axi.sv
- src/axi_modify_address.sv
- src/axi_to_axi_lite.sv
axi_sim:
files:
- src/axi_test.sv
flags:
- skip_synthesis
- only_local
|
# repo: iicarus-bit/google-ctf
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
runtime: python
env: flex
entrypoint: gunicorn -w 4 -b 0.0.0.0:$PORT -b 0.0.0.0:80 main:app
service: cookie-world-order
runtime_config:
python_version: 2
manual_scaling:
# If you change this it might break the SSRF
instances: 1
env_variables:
NO_PROXY: 'localhost,127.0.0.1'
|
version: '3'
services:
db:
image: mysql:latest
ports:
- "3306:3306"
volumes:
- ./var/mysql:/var/lib/mysql
environment:
- MYSQL_DATABASE=db
- MYSQL_USER=admin
- MYSQL_PASSWORD=password
- MYSQL_ROOT_PASSWORD=password
web:
# image: eboraas/apache-php
image: tutum/apache-php
ports:
- "80:80"
- "443:443"
links:
- db
volumes:
- ./www:/app
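# Usage sketch: bring the stack up in the background; MySQL is then published on
# localhost:3306 and the PHP app on ports 80/443:
#   docker-compose up -d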
|
name: Sync + Release
on:
repository_dispatch:
types: [sync_release]
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Sync with upstream + Release
run: ./.github/sync_release.sh <EMAIL> <EMAIL>
env:
NOTIFICATION_API_KEY: ${{ secrets.NOTIFICATION_API_KEY }}
|
# repo: Malcolmnixon/VhdlTest, file: example/test.yaml
files:
- full_adder.vhd
- full_adder_pass_tb.vhd
- full_adder_fail_tb.vhd
- half_adder.vhd
- half_adder_pass_tb.vhd
- half_adder_fail_tb.vhd
tests:
- full_adder_pass_tb
- full_adder_fail_tb
- half_adder_pass_tb
- half_adder_fail_tb
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: ssd-resnet34 detector based on COCO.
input size: 1200*1200
float ops: 433G
task: detection
framework: tensorflow
prune: 'no'
version: 2.0
files:
- name: tf_mlperf_resnet34_coco_1200_1200_433G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=tf_mlperf_resnet34_coco_1200_1200_433G_2.0.zip
checksum: ba5e2b8b97f56391987100e19e82dcd1
- name: mlperf_ssd_resnet34_tf
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=mlperf_ssd_resnet34_tf-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: 982ac73b0dbd5d20efa3885e93f8bb49
- name: mlperf_ssd_resnet34_tf
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=mlperf_ssd_resnet34_tf-vck190-r2.0.0.tar.gz
checksum: feca40b1a1001de9896b0fe12ecd1160
- name: mlperf_ssd_resnet34_tf
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=mlperf_ssd_resnet34_tf-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: 9d8e252957c1f8f721d8d78b75051619
- name: mlperf_ssd_resnet34_tf
type: xmodel
board: vck50008pe-DPUCVDX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=mlperf_ssd_resnet34_tf-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz
checksum: d0ee75ec8469403b1449fc030dab93e9
- name: mlperf_ssd_resnet34_tf
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=mlperf_ssd_resnet34_tf-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: 8e739afd48632fcc844b25be5e211af5
- name: mlperf_ssd_resnet34_tf
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=mlperf_ssd_resnet34_tf-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: 4628ef8e522dcf7be9f7fe4020eb34ef
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
# Device description for the Hewlett Packard Pulse-/ Pattern Generator.
# Only GPIB shows working communication with this device!
identifier : HEWLETT-PACKARD,HP81104A
channel 1:
on : OUTP1:NORM:STAT ON
off : OUTP1:NORM:STAT OFF
channel 2:
on : OUTP2:NORM:STAT ON
off : OUTP2:NORM:STAT OFF
on : OUTP1:NORM:STAT ON
off : OUTP1:NORM:STAT OFF
set_frequency : FREQ
get_frequency : FREQ?
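# These values are raw SCPI strings; a sketch of driving the instrument over GPIB
# with pyvisa (GPIB address assumed):
#   python -c "import pyvisa; inst = pyvisa.ResourceManager().open_resource('GPIB0::10::INSTR'); inst.write('OUTP1:NORM:STAT ON')"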
|
---
algorithm:
class: Nsga2
population_size: 200
probabilities:
crossover: 0.5
mutation: 0.01
injection: 0.9
shorten_individual: true
init:
method: ramped # grow or full or ramped
sensible_depth: 7
inject:
method: grow # grow or full or random
sensible_depth: 7
termination:
max_steps: 1000
on_individual: stopping_condition
grammar:
class: Abnf::File
filename: sample/ant_trail_tcc/grammar.abnf
mapper:
class: DepthLocus
crossover:
class: CrossoverRipple
margin: 2 #1
step: 2
mutation:
class: MutationRipple
store:
class: Store
filename: ./ant_nsga2_tcc.store
report:
class: PopulationReport
individual:
class: PipedIndividual
shorten_chromozome: false
_pareto:
:fitness: maximize
:used_length: minimize
_pipe_output:
- :fitness: to_i
_thresholds:
:fitness: 89
_mark_phenotype: "\nMARKER\n"
evaluator:
class: WorkPipes
commands:
- 'ruby sample/ant_trail_tcc/ant_pipe.rb ONE sample/ant_trail_tcc/ant_evaluate.c'
- 'ruby sample/ant_trail_tcc/ant_pipe.rb TWO sample/ant_trail_tcc/ant_evaluate.c'
# for the remote pipe configuration, use:
#
# - 'ssh user@host "ruby /full/path/to/geret/sample/ant_trail_tcc/ant_pipe.rb ID sample/ant_trail_tcc/ant_evaluate.c"'
#
# note the ssh connection must use public/private key pair (no password) for authentication.
# (see eg. http://www.petefreitag.com/item/532.cfm
# or http://www.debian-administration.org/article/SSH_with_authentication_key_instead_of_password )
|
# Copyright 2021 ETH Zurich and University of Bologna.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
package:
name: mempool
dependencies:
axi: { git: "https://github.com/pulp-platform/axi.git", version: 0.27.1 }
cluster_interconnect: { git: "https://github.com/pulp-platform/cluster_interconnect.git", version: 1.2.0 }
common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.20.0 }
register_interface: { git: "https://github.com/pulp-platform/register_interface.git", version: 0.2.0 }
snitch: { path: "hardware/deps/snitch" }
tech_cells_generic: { git: "https://github.com/pulp-platform/tech_cells_generic.git", version: 0.2.1 }
workspace:
checkout_dir: "./hardware/deps"
export_include_dirs:
- hardware/include
sources:
# Level 0
- hardware/src/axi_hier_interco.sv
- hardware/src/mempool_pkg.sv
- hardware/src/mempool_cc.sv
- hardware/src/reorder_buffer.sv
- hardware/src/snitch_addr_demux.sv
- hardware/src/tcdm_adapter.sv
- hardware/src/tcdm_shim.sv
- hardware/src/address_scrambler.sv
- hardware/src/axi2mem.sv
- hardware/src/bootrom.sv
- hardware/src/latch_scm.sv
# Level 1
- hardware/src/mempool_tile.sv
# Level 2
- hardware/src/mempool_group.sv
# Level 3
- hardware/src/mempool_cluster.sv
# Level 4
- hardware/src/mempool_cluster_wrap.sv
- hardware/src/ctrl_registers.sv
# Level 5
- hardware/src/mempool_system.sv
- target: mempool_vsim
files:
# Level 1
- hardware/tb/axi_uart.sv
- hardware/tb/traffic_generator.sv
# Level 2
- hardware/tb/mempool_tb.sv
- target: mempool_verilator
files:
# Level 1
- hardware/tb/axi_uart.sv
- hardware/tb/traffic_generator.sv
# Level 2
- hardware/tb/mempool_tb_verilator.sv
- target: fpga
files:
# Level 1
- hardware/src/axi_rab_wrap.sv
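# The target-specific groups above are selected with Bender's -t flag,
# e.g. (hedged example) `bender script vsim -t mempool_vsim` would emit a
# compile script that also includes the vsim-only testbench sources.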
|
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
name: snitch_const_cache
authors:
- <NAME> <<EMAIL>>
- <NAME> <<EMAIL>>
dependencies:
axi: {path: ../../vendor/pulp_axi}
snitch_icache: {path: ../snitch_icache}
sources:
# Level 0:
- src/snitch_const_cache.sv
|
# repo: SHirsch78/XSharpDev
name: 'nightly artifacts cleanup'
on:
schedule:
- cron: '0 1 * * *' # every night at 1 am UTC
workflow_dispatch:
jobs:
delete-artifacts:
runs-on: windows-2019
steps:
- uses: kolpav/purge-artifacts-action@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
expire-in: 30days # Set this to 0 to delete all artifacts
|
# repo: FPGA-Research-Manchester/FABulous
name: fabulous-doc
channels:
- conda-forge
- defaults
dependencies:
- python>=3.8
- pip
- pip:
- -r ./requirements.txt
- graphviz
|
FPGA Core dts01 Temperature:
- id: 8
FPGA Core dts11 Temperature:
- id: 9
FPGA Core dts12 Temperature:
- id: 13
FPGA Core dts21 Temperature:
- id: 10
FPGA Core dts22 Temperature:
- id: 14
FPGA Core dts31 Temperature:
- id: 11
FPGA Core dts32 Temperature:
- id: 15
FPGA Core dts41 Temperature:
- id: 12
FPGA Core dts42 Temperature:
- id: 16
HSSI_0_0 dts1 Temperature:
- id: 6
HSSI_0_1 dts1 Temperature:
- id: 2
- id: 3
adjustment: -1.5
- id: 4
adjustment: -1.5
- id: 5
adjustment: -1.5
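# Assumed semantics (not documented here): when one sensor lists several ids,
# their readings are combined, and 'adjustment' is an offset in degrees C
# applied to that id's raw value, e.g. a raw 61.5 on id 3 would report 60.0.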
HSSI_0_1 dts2 Temperature:
- id: 3
HSSI_0_1 dts3 Temperature:
- id: 4
HSSI_0_1 dts4 Temperature:
- id: 5
FPGA Virtual Temperature Sensor 0:
- id: 0x8000
|
variables:
VSIM: vsim-10.7b -64
VLIB: vlib-10.7b
VLOG: vlog-10.7b -64
SYNOPSYS_DC: synopsys-2019.12 dc_shell -64bit
before_script:
- export PATH=~/.cargo/bin:$PATH
- mkdir -p build
vsim:
stage: build
script:
- >
if ! memora lookup vsim; then
cd build && ../scripts/compile_vsim.sh
memora insert vsim
fi
synopsys_dc:
stage: build
script:
- >
if ! memora lookup synopsys_dc; then
cd build && ../scripts/synth.sh
memora insert synopsys_dc
fi
.test_module: &test_module
stage: test
script:
- >
if ! memora lookup $TEST_MODULE; then
memora get vsim
cd build && ../scripts/run_vsim.sh $TEST_MODULE
memora insert $TEST_MODULE
fi
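# Every job below inherits the template via the YAML merge key and only
# overrides TEST_MODULE; adding a new test is a sketch like this (assuming a
# testbench named my_module_test exists in the repository):
#
#   my_module_test:
#     <<: *test_module
#     variables:
#       TEST_MODULE: my_module_test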
axi_addr_test:
<<: *test_module
variables:
TEST_MODULE: axi_addr_test
axi_atop_filter:
<<: *test_module
variables:
TEST_MODULE: axi_atop_filter
axi_cdc:
<<: *test_module
variables:
TEST_MODULE: axi_cdc
axi_delayer:
<<: *test_module
variables:
TEST_MODULE: axi_delayer
axi_dw_downsizer:
<<: *test_module
variables:
TEST_MODULE: axi_dw_downsizer
axi_dw_upsizer:
<<: *test_module
variables:
TEST_MODULE: axi_dw_upsizer
axi_isolate:
<<: *test_module
variables:
TEST_MODULE: axi_isolate
axi_lite_regs:
<<: *test_module
variables:
TEST_MODULE: axi_lite_regs
axi_lite_to_apb:
<<: *test_module
variables:
TEST_MODULE: axi_lite_to_apb
axi_lite_to_axi:
<<: *test_module
variables:
TEST_MODULE: axi_lite_to_axi
axi_lite_mailbox:
<<: *test_module
variables:
TEST_MODULE: axi_lite_mailbox
axi_lite_xbar:
<<: *test_module
variables:
TEST_MODULE: axi_lite_xbar
axi_modify_address:
<<: *test_module
variables:
TEST_MODULE: axi_modify_address
axi_serializer:
<<: *test_module
variables:
TEST_MODULE: axi_serializer
axi_to_axi_lite:
<<: *test_module
variables:
TEST_MODULE: axi_to_axi_lite
axi_xbar:
<<: *test_module
variables:
TEST_MODULE: axi_xbar
|
resources:
- grafana.f110.dev_grafanas.yaml
- grafana.f110.dev_grafanausers.yaml
- harbor.f110.dev_harborrobotaccounts.yaml
- harbor.f110.dev_harborprojects.yaml
- minio.f110.dev_miniobuckets.yaml
- minio.f110.dev_miniousers.yaml
- consul.f110.dev_consulbackups.yaml
|
# Human readable task name
name: H1
# Long form description.
description: |+
Crypto is not real hacking, they say.
# The flag
flag: CTF{But_in_real_life_devs_would_never_use_such_a_buggy_RNG_right?}
# Task category. (one of hw, crypto, pwn, rev, web, net, misc)
category: crypto
# === the fields below will be filled by SRE or automation ===
# Task label
label: ''
# URL for web challenges
link: ''
# host/port for non-web challenges
host: ''
# the URL for attachments, to be filled in by automation
attachment: ''
# is this challenge released? Will be set by SREs
visible: false
|
name: symbiyosys-tests
on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-18.04
steps:
- uses: actions/checkout@v2
with:
lfs: true
- name: Install SymbiYosys
run: |
wget https://github.com/open-tool-forge/fpga-toolchain/releases/download/nightly-20201104/fpga-toolchain-linux_x86_64-nightly-20201104.tar.xz
tar -xf fpga-toolchain-linux_x86_64-nightly-20201104.tar.xz
rm fpga-toolchain-linux_x86_64-nightly-20201104.tar.xz
echo "$(pwd)/fpga-toolchain/bin" >> $GITHUB_PATH
- name: Run tests using SymbiYosys
run: |
cd formal
./run.sh
|
name: cgra_rtl_sim_compile
commands:
- bash compile_rtl_testbench.sh
inputs:
- design.v
outputs:
- xcelium.d
- xrun.log
- libcgra.so
parameters:
array_width: 12
array_height: 12
axi_addr_width: 13
axi_data_width: 32
glb_tile_mem_size: 256
clock_period: 1.2
top_name: top
postconditions:
- assert File( 'outputs/xcelium.d' )
- assert File( 'outputs/xrun.log' )
- assert File( 'outputs/libcgra.so' )
|
# Use full-chip agents so as not to hamper per-check-in jobs
agents: { queue: "papers" }
env:
GOLD: /build/glb${BUILDKITE_BUILD_NUMBER}/full_chip
# OVERRIDE_MFLOWGEN_BRANCH: placementCheck
steps:
# Build mflowgen framework for full-chip
- label: 'setup'
commands:
- 'source mflowgen/bin/setup-buildkite.sh --dir $$GOLD --need_space 10G;
mflowgen run --design $$GARNET_HOME/mflowgen/full_chip;
set -x;
grep mem_size $$GARNET_HOME/mflowgen/glb_top/construct.py;
grep mem_size .mflowgen/*glb_top/configure.yml;
grep mem_size -r .mflowgen;
'
- wait: ~
# Build RTL from scratch
- label: 'rtl'
commands:
- 'source mflowgen/bin/setup-buildkite.sh --dir $$GOLD --need_space 10G;
echo "--- MAKE RTL"; set -o pipefail;
make rtl |& tee make-rtl.log'
- wait: ~
# make glb_top
  # Note: "echo exit 13" prevents a hang at the genus/innovus prompt and allows a clean failure
- label: 'glb_top'
commands:
- 'source mflowgen/bin/setup-buildkite.sh --dir $$GOLD --need_space 10G;
echo "--- MAKE GLB_TOP"; set -o pipefail;
echo exit 13 | make glb_top |& tee make-glb_top.log'
- wait: ~
|
name: mc-gen-sram
commands:
- bash gen_srams.sh
outputs:
- sram.v
- sram_pwr.v
- sram.lef
- sram_tt.lib
- sram_tt.db
- sram.gds
- sram.spi
parameters:
sram_word_size: 144
sram_num_words: 1024
sram_mux_size: 4
sram_corner: "tt0p8v25c"
sram_partial_write: True
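# Derived capacity (sketch): 1024 words x 144 bits = 147,456 bits (18 KiB),
# generated with a column mux of 4 and partial-write support, characterized
# at the tt0p8v25c corner.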
|
# file: mflowgen/glb_top/sim/configure.yml
name: sim
commands:
- bash run_sim.sh
inputs:
- design.v
- glb_tile.v
outputs:
- sim.log
postconditions:
- assert File( 'outputs/sim.log' ) # must exist
# Basic error checking
- assert '*E,' not in File( 'outputs/sim.log' )
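# ('*E,' is the error-message prefix in Xcelium/xrun logs, so this substring
# check fails the step if the simulation printed any error; assumed from the
# xrun-based flow used elsewhere in this repository.)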
|
variables:
VSIM: vsim-10.6b -64
VLIB: vlib-10.6b
VMAP: vmap-10.6b
VCOM: vcom-10.6b -64
VLOG: vlog-10.6b -64
VOPT: vopt-10.6b -64
before_script:
- export PATH="~fschuiki/.cargo/bin/":$PATH
- |
echo 'plugins:' > Bender.local
echo ' bender-vsim: { git: "<EMAIL>:fschuiki/bender-vsim.git", rev: master }' >> Bender.local
stages:
- build
- test
compile:
stage: build
script:
- bender vsim
test_fifo:
stage: test
script:
# workaround to prevent simulation from failing due to assertions
- bender vsim -t test --vlog-flag="+define+VERILATOR"
- cd build
- $VSIM -c work.fifo_tb -do "run -a; quit -code [coverage attribute -name TESTSTATUS -concise]" -GDEPTH=0
- $VSIM -c work.fifo_tb -do "run -a; quit -code [coverage attribute -name TESTSTATUS -concise]" -GDEPTH=13
- $VSIM -c work.fifo_tb -do "run -a; quit -code [coverage attribute -name TESTSTATUS -concise]" -GDEPTH=32 -GFALL_THROUGH=1
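    # The -do script exits vsim with the TESTSTATUS coverage attribute as its
    # return code, so a failing or incomplete testbench fails this CI job.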
test_stream_register:
stage: test
script:
# workaround to prevent simulation from failing due to assertions
- bender vsim -t test --vlog-flag="+define+VERILATOR"
- cd build
- $VSIM -c work.stream_register_tb -do "run -a; quit -code [coverage attribute -name TESTSTATUS -concise]"
test_cdc_2phase:
stage: test
script:
- bender vsim -t test
- cd build
- $VOPT cdc_2phase_tb -o cdc_2phase_tb_opt -GUNTIL=1000000
- $VSIM -c cdc_2phase_tb_opt -do "run -a; quit -code [coverage attribute -name TESTSTATUS -concise]"
test_cdc_fifo:
stage: test
script:
- bender vsim -t test
- cd build
- for g in 0 1; do for i in 1 2 3 4 5; do $VOPT cdc_fifo_tb -o cdc_fifo_tb_depth${i}_gray${g} -GDEPTH=$i -GGRAY=$g; done; done
- for g in 0 1; do for i in 1 2 3 4 5; do $VSIM -c cdc_fifo_tb_depth${i}_gray${g} -do "run -a; quit -code [coverage attribute -name TESTSTATUS -concise]"; done; done
test_graycode:
stage: test
script:
- bender vsim -t test
- cd build
- for i in 1 2 3 4 8 16; do $VOPT graycode_tb -o graycode_tb_$i -GN=$i; done
- for i in 1 2 3 4 8 16; do $VSIM -c graycode_tb_$i -do "run -a; quit -code [coverage attribute -name TESTSTATUS -concise]"; done
|
sim.inputs.top_module: "SVM"
sim.inputs.tb_dut: "dut"
sim.inputs.tb_name: "SVM_tb"
sim.inputs.input_files_meta: "append"
sim.inputs.input_files:
- "src/SVM/SVM.sv"
- "src/SVM/SVM_tb.sv"
- "src/SVM/multiply_quantize.sv"
sim.inputs:
timescale: "1ns/1ps"
options:
- "-notice"
- "-line"
- "-debug_pp"
- "-debug_all"
- "+v2k"
- "+lint=all,noVCDE"
- "+incdir+../../src/SVM"
- "+define+CLOCK_PERIOD=15"
- "-sverilog"
execute_sim: true
execution_flags: ["+verbose=1"]
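# The options above are VCS-style flags (an assumption based on their syntax):
# -sverilog enables SystemVerilog, +v2k enables Verilog-2001 constructs,
# +incdir+... extends the include search path, and +define+CLOCK_PERIOD=15
# sets the clock-period macro consumed by the testbench.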
|
# repo: mgielda/hwt, file: .travis.yml
dist: trusty
language: python
git:
depth: 1
cache:
apt: true
python:
- '3.6'
install:
- export PYTHON_VERSION=$(python -c "import sys;v = sys.version_info;print('%d.%d.%d' % (v[0], v[1], v[2]))")
- export PYTHON_VERSION_SHORT=$(python -c "import sys;v = sys.version_info;print('%d.%d' % (v[0], v[1]))")
- DIST_PACKAGES=/home/travis/virtualenv/python$PYTHON_VERSION/lib/python$PYTHON_VERSION_SHORT/site-packages
- pip3 install coveralls
- python3 setup.py develop
- cd ..
- git clone --depth=1 https://github.com/Nic30/pyDigitalWaveTools.git
- cd pyDigitalWaveTools
- python3 setup.py install
- cd ..
- git clone --depth=1 https://github.com/Nic30/ipCorePackager.git
- cd ipCorePackager
- python3 setup.py install
- cd ..
- git clone --depth=1 https://github.com/Nic30/hwtLib.git
- cd hwt
- rm -rf $DIST_PACKAGES/hwt-*
- ln -s hwt $DIST_PACKAGES/hwt
script:
- coverage run --branch --source=hwt ../hwtLib/setup.py test
deploy:
provider: pypi
user: nic30
password:
secure: <KEY>
on:
tags: true
branch: master
after_success:
- coveralls
|
PROJECT:
dt: 0.1e-6
board_name: ZC706
plugins: ['msdsl']
emu_clk_freq: 10e6
  flatten_hierarchy: 'none'
vivado_stack: 2000
ila_depth: 4096
# cpu_debug_mode: 1
# cpu_debug_hierarchies: [[0, "top"]]
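# Derived timing (sketch): emu_clk_freq of 10e6 Hz corresponds to a 100 ns
# emulator clock period, matching the dt timestep of 0.1e-6 s (100 ns).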
|
<filename>uloop-example/code.yml
#
# code.yml
# <NAME> <<EMAIL>>
#
# Copyright (C) 2017-2019 ETH Zurich, University of Bologna
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# See LICENSE.sw.txt for details.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# LOOP0 loop_stream_inner: for jj in range(0,nif/TP)
# LOOP1 loop_filter_x : for k in range(0,fs)
# LOOP2 loop_filter_y : for l in range(0,fs)
# LOOP3 loop_stream_outer: for ii in range(0,nof/TP)
# LOOP4 loop_spatial_x : for m in range(0,ow)
# LOOP5 loop_spatial_y : for n in range(0,oh)
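#
# Taken together the loops form the following nest (rough Python-style
# sketch; nif, nof, fs, ow, oh and TP come from the accelerator
# configuration, and LOOP0 is assumed to be the innermost loop):
#
#   for n in range(0, oh):                 # LOOP5 loop_spatial_y
#     for m in range(0, ow):               # LOOP4 loop_spatial_x
#       for ii in range(0, nof/TP):        # LOOP3 loop_stream_outer
#         for l in range(0, fs):           # LOOP2 loop_filter_y
#           for k in range(0, fs):         # LOOP1 loop_filter_x
#             for jj in range(0, nif/TP):  # LOOP0 loop_stream_inner
#               pass  # body: pointer updates listed under 'code' below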
# mnemonics to simplify microcode writing
mnemonics:
W: 0
x: 1
y: 2
x_major: 3
nif: 4
nof: 5
ow_X_nof: 6
w_X_nif: 7
TPin: 8
nif_X_fs2: 9
TPout: 10
x_iter: 11
x_maj_iter: 12
zero: 14
TP2: 15
# actual microcode
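# Assumed op semantics (sketch): 'add' accumulates b into a (a += b), while
# 'mv' overwrites a with b (a = b); a and b refer to the mnemonic registers
# defined above.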
code:
loop_stream_inner: # for k_in_major in range(0, N_in/TP)
- { op : add, a: W, b: TPin }
- { op : add, a: x, b: TPin }
loop_filter_x: # for u_j in range(0, fs)
- { op : add, a: W, b: TPin }
- { op : add, a: x, b: TPin }
loop_filter_y: # for u_i in range(0, fs)
- { op : add, a: W, b: TPin }
- { op : add, a: x, b: x_iter }
loop_stream_outer: # for k_out_major in range(0, N_out/TP)
- { op : add, a: W, b: TPin }
- { op : add, a: y, b: TPout }
- { op : mv, a: x, b: x_major }
loop_spatial_x: # for j in range(0, w_out)
- { op : mv, a: W, b: zero }
- { op : add, a: y, b: TPout }
- { op : add, a: x_major, b: nif }
- { op : mv, a: x, b: x_major }
loop_spatial_y: # for i in range(0, h_out)
- { op : mv, a: W, b: zero }
- { op : add, a: y, b: TPout }
- { op : add, a: x_major, b: x_maj_iter }
- { op : mv, a: x, b: x_major }
|
# repo: ess-dmsc/dmg-build-scripts
---
- hosts: data-generators
gather_facts: False
tasks:
- name: start multigrid data
command: "{{daemonize_cmd}} {{script_path}}/datagen_multiblade.bash"
tags:
- multiblade
|
# repo: pan185/UnarySim
# This file defines single architecture set for tlut systolic array performance projection
- proj_16_16_bank4_block16
- proj_32_32_bank4_block16
- proj_64_64_bank4_block16
- proj_128_128_bank4_block16
|