Search is not available for this dataset
content
stringlengths 0
376M
|
---|
<reponame>ashoward/ipbus-firmware<filename>ci/publish.yml
save_build_results:
stage: publish
image: gitlab-registry.cern.ch/ci-tools/ci-web-deployer:latest
only:
- /^pull-requests.[0-9]+$/
- master
- tags
- web
variables:
CI_OUTPUT_DIR: "build_results/"
OUTPUT_ROOT_DIR: "/eos/user/i/ipbusci/fw-gitlab-ci"
before_script:
- yum -y install openssh-clients
- export EOS_PATH=${OUTPUT_ROOT_DIR}/commits/${CI_COMMIT_TAG:-${CI_COMMIT_SHA}}/pipeline${CI_PIPELINE_ID}
- if [ -n "${CI_COMMIT_TAG}" ]; then export EOS_PATH=${EOS_PATH/commits/tags} ; fi
script:
- env | grep -v PASSWORD | grep -v TOKEN | sort
- echo "${EOS_ACCOUNT_PASSWORD}" | kinit ${<EMAIL>
- klist
- ssh -F ${CI_PROJECT_DIR}/ci/ssh_config ${EOS_ACCOUNT_USERNAME}@lxplus "rm -rf ${EOS_PATH} && mkdir -p ${EOS_PATH}"
- ssh -F ${CI_PROJECT_DIR}/ci/ssh_config ${EOS_ACCOUNT_USERNAME}@lxplus "ln -sfnv ${EOS_PATH} `dirname ${EOS_PATH}`/latest"
- kdestroy
- ls -l ${CI_OUTPUT_DIR} ${CI_OUTPUT_DIR}/*
- ls -l ${CI_OUTPUT_DIR}/*/*
- deploy-eos
|
<filename>.github/workflows/main.yml
name: 'main'
on:
push:
pull_request:
jobs:
tests:
runs-on: ubuntu-latest
strategy:
matrix:
TEST_CASE:
- onenet
- OneNetModport
- OneFF
- OneAnd
- OneDivider
- OneNetInterf
- OneNetRange
- OneConcat
- OneReplicate
- OneAlwaysComb
- OneShift
- OneArithShift
- OneSysFunc
- OneInside
- OneImport
- OneCast
- OnePackage
- OneStruct
- fsm_single_always
- fsm_using_function
- fsm_using_always
- AluOps
- UnitForLoop
- GenerateAssigns
- AssignmentPattern
- MultipleCells
- IndexedPartSelect
- PartSelect
- VarSelect
- EnumInPackage
- StructInPackage
TARGET:
- uhdm/yosys/test-ast
- uhdm/verilator/test-ast
exclude:
- TEST_CASE: EnumInPackage
TARGET: uhdm/verilator/test-ast
- TEST_CASE: StructInPackage
TARGET: uhdm/verilator/test-ast
- TEST_CASE: OneThis
TARGET: uhdm/verilator/test-ast
fail-fast:
false
env:
CC: gcc-9
CXX: g++-9
steps:
- name: Install dependencies
run: |
sudo apt-get update -qq
sudo apt install -y g++-9 build-essential cmake tclsh ant default-jre swig google-perftools libgoogle-perftools-dev python3 python3-dev uuid uuid-dev tcl-dev flex libfl-dev
- uses: actions/checkout@v2
with:
submodules: recursive
fetch-depth: 0
- uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: Build & Test
run: ./.github/ci.sh
env:
TEST_CASE: tests/${{ matrix.TEST_CASE }}
TARGET: ${{ matrix.TARGET }}
surelog-uhdm:
runs-on: ubuntu-latest
env:
CC: gcc-9
CXX: g++-9
steps:
- name: Install dependencies
run: |
sudo apt-get update -qq
sudo apt install -y g++-9 build-essential cmake tclsh ant default-jre swig google-perftools libgoogle-perftools-dev python3 python3-dev uuid uuid-dev tcl-dev flex libfl-dev
- uses: actions/checkout@v2
with:
submodules: recursive
fetch-depth: 0
- uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: Build & Test
run: ./.github/ci.sh
env:
MODE: surelog-uhdm
ibex_synth:
runs-on: ubuntu-latest
strategy:
matrix:
TARGET: [uhdm/yosys/synth-ibex]
fail-fast:
false
env:
CC: gcc-9
CXX: g++-9
steps:
- name: Install dependencies
run: |
sudo apt-get update -qq
sudo apt install -y g++-9 build-essential cmake tclsh ant default-jre swig google-perftools libgoogle-perftools-dev python3 python3-dev uuid uuid-dev tcl-dev flex libfl-dev
- uses: actions/checkout@v2
with:
submodules: recursive
fetch-depth: 0
- uses: actions/setup-python@v2
with:
python-version: '3.7'
- name: Build & Test
run: |
pip install virtualenv
virtualenv venv-ibex
. venv-ibex/bin/activate
pip install -r tests/ibex/ibex/python-requirements.txt
cd tests/ibex/ibex
fusesoc --cores-root=. run --target=synth --setup lowrisc:ibex:top_artya7 --part xc7a35ticsg324-1L
cd ../../..
./.github/ci.sh
env:
TARGET: uhdm/yosys/synth-ibex
TEST_CASE: tests/ibex
|
pip_test:
- src_wire: CLBLM_R_X11Y93/CLBLM_L_D3
dst_wire: SLICE_X15Y93.SLICEL/D3
pip_chain_test:
- wires:
- $CONSTANTS_X0Y0.$CONSTANTS/$GND_SOURCE
- $CONSTANTS_X0Y0/$GND_NODE
- TIEOFF_X3Y145.TIEOFF/$GND_SITE_WIRE
- TIEOFF_X3Y145.TIEOFF/HARD0GND_HARD0
- INT_R_X3Y145/GND_WIRE
- wires:
- $CONSTANTS_X0Y0.$CONSTANTS/$VCC_SOURCE
- $CONSTANTS_X0Y0/$VCC_NODE
- TIEOFF_X3Y145.TIEOFF/$VCC_SITE_WIRE
- TIEOFF_X3Y145.TIEOFF/HARD1VCC_HARD1
- INT_R_X3Y145/VCC_WIRE
- wires:
- $CONSTANTS_X0Y0.$CONSTANTS/$VCC_SOURCE
- $CONSTANTS_X0Y0/$VCC_NODE
- SLICE_X3Y145.SLICEL/$VCC_SITE_WIRE
- SLICE_X3Y145.SLICEL/CEUSEDVCC_HARD1
- wires:
- $CONSTANTS_X0Y0.$CONSTANTS/$GND_SOURCE
- $CONSTANTS_X0Y0/$GND_NODE
- SLICE_X3Y145.SLICEL/$GND_SITE_WIRE
- SLICE_X3Y145.SLICEL/SRUSEDGND_HARD0
bel_pin_test:
- bel: SLICE_X15Y93.SLICEL/D6LUT
pin: A3
wire: SLICE_X15Y93.SLICEL/D3
- bel: $CONSTANTS_X0Y0.$CONSTANTS/GND
pin: G
wire: $CONSTANTS_X0Y0.$CONSTANTS/$GND_SOURCE
- bel: $CONSTANTS_X0Y0.$CONSTANTS/VCC
pin: P
wire: $CONSTANTS_X0Y0.$CONSTANTS/$VCC_SOURCE
|
<reponame>CansWang/open-source-phy_SKY130
name: rtl
commands:
- |
mkdir -p outputs
cp -r $VLOG_HOME/digital_top ./outputs
cat "./outputs/digital_top/digital_top.sv" "./outputs/digital_top/prbs_generator_syn.sv" "./outputs/digital_top/qr_4t1_mux_top.sv" "./outputs/digital_top/hr_16t4_mux_top.sv" "./outputs/digital_top/output_buffer.sv"> "./outputs/design.v"
python gen_tcl.py
outputs:
- design.v
- read_design.tcl
|
title: pixhdl
email: <EMAIL>
description: >- # this means to ignore newlines until "baseurl:"
A command-line tool that produces graphical representations of entities from VHDL source files.
baseurl: "/pixhdl" # the subpath of your site, e.g. /blog
url: "https://kokkonisd.github.io" # the base hostname & protocol for your site, e.g. http://example.com
twitter_username: jimkokko5
github_username: kokkonisd
# Build settings
markdown: kramdown
highlighter: rouge
|
---
category:
- Object oriented
note: Concurrency
requires:
- Concurrency
- Objects
- Mutable State
|
<gh_stars>1-10
theme: jekyll-theme-dinky
title: APB4 Multiplexer
description: Parameterized APB4 Slave multiplexer
show_downloads: true
show_license: true
license: Non-Commercial License
|
<gh_stars>0
# Check NEORV32 software framework and test processor
name: Processor
on:
push:
branches:
- master
paths:
- 'rtl/**'
- 'sw/**'
- 'sim/**'
pull_request:
branches:
- master
paths:
- 'rtl/**'
- 'sw/**'
- 'sim/**'
workflow_dispatch:
jobs:
Processor:
runs-on: ubuntu-latest
name: 'π§ Ubuntu | Shell script'
steps:
- name: 'π§° Repository Checkout'
uses: actions/checkout@v2
- name: 'βοΈ Build Software Framework Tests'
uses: docker://ghcr.io/stnolting/neorv32/sim
with:
args: ./do.py BuildAndInstallSoftwareFrameworkTests
- name: 'π§ Run Processor Hardware Tests with shell script'
uses: docker://ghcr.io/stnolting/neorv32/sim
with:
args: ./sim/simple/ghdl.sh
VUnit-Container:
runs-on: ubuntu-latest
name: 'π³οΈ Container | VUnit'
steps:
- name: 'π§° Repository Checkout'
uses: actions/checkout@v2
- name: 'βοΈ Build and install Processor Check software'
uses: docker://ghcr.io/stnolting/neorv32/sim
with:
args: >-
make -C sw/example/processor_check
clean_all
USER_FLAGS+=-DRUN_CHECK
USER_FLAGS+=-DUART0_SIM_MODE
USER_FLAGS+=-DSUPPRESS_OPTIONAL_UART_PRINT
MARCH=-march=rv32imac
info
all
- name: 'π€ Archive Processor Check application image'
uses: actions/upload-artifact@v2
with:
name: application
path: rtl/core/neorv32_application_image.vhd
- name: 'π§ Run Processor Hardware Tests with VUnit'
uses: VUnit/vunit_action@master
with:
image: ghcr.io/stnolting/neorv32/sim
cmd: ./sim/run.py --ci-mode -v
|
<filename>.travis.yml<gh_stars>0
matrix:
include:
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-7
- libgmp-dev
- libmpfr-dev
- libmpc-dev
- verilator
env:
- CC=gcc-7
- CXX=g++-7
- os: osx
osx_image: xcode10.2
addons:
homebrew:
packages:
- verilator
# python managed by conda until 3.7 available
# python:
# - '3.6'
install:
# install conda for py 3.7
- |
if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
wget https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh -O miniconda.sh
else
# install conda for py 3.7
wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh
fi
- chmod +x miniconda.sh
- ./miniconda.sh -b -p $TRAVIS_BUILD_DIR/miniconda
- export PATH=$TRAVIS_BUILD_DIR/miniconda/bin:$PATH
- hash -r
- conda config --set always_yes yes --set changeps1 no
- conda update -q conda
- conda create -q -n test-env python=3.7.3
- source activate test-env
- conda install pip
# End install conda
- curl -s -L https://github.com/rdaly525/coreir/releases/latest | grep "href.*coreir-${TRAVIS_OS_NAME}.tar.gz" | cut -d \" -f 2 | xargs -I {} wget https://github.com"{}"
- mkdir coreir_release;
- tar -xf coreir-${TRAVIS_OS_NAME}.tar.gz -C coreir_release --strip-components 1;
- cd coreir_release && sudo make install && cd ..
- |
if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
# avoid strange libjpeg error (see https://github.com/sgrif/pq-sys/issues/1
# for some more info)
export DYLD_LIBRARY_PATH=/System/Library/Frameworks/ImageIO.framework/Versions/A/Resources/:/usr/local/lib:$DYLD_LIBRARY_PATH
fi
- pip install pytest-cov pytest-codestyle
- pip install mantle # for tests.common
- pip install -e .
# Begin setup CoSA dependencies
- |
if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then
pysmt-install --msat --confirm-agreement
fi
- pysmt-install --z3 --confirm-agreement
- export PYTHONPATH="/home/travis/.smt_solvers/python-bindings-3.7:${PYTHONPATH}"
- export LD_LIBRARY_PATH="/home/travis/.smt_solvers/python-bindings-3.7:${LD_LIBRARY_PATH}"
- pysmt-install --check
# End setup CoSA dependencies
script:
- pytest --cov fault --codestyle fault -v --cov-report term-missing tests
deploy:
provider: script
script: /bin/bash .travis/deploy.sh
skip_cleanup: true
on:
tags: true
branch: master
|
<gh_stars>0
jobs:
- single_inflight_00_01_sz_zero:
variables:
- MKDV_COCOTB_MODULE: fwnoc_tests.single_inflight_00_01_sz_zero
- single_inflight_b2b_sz_zero:
variables:
- MKDV_COCOTB_MODULE: fwnoc_tests.single_inflight_b2b_sz_zero
- single_inflight_b2b_sz_one:
variables:
- MKDV_COCOTB_MODULE: fwnoc_tests.single_inflight_b2b_sz_one
- single_inflight_b2b_sz_two:
variables:
- MKDV_COCOTB_MODULE: fwnoc_tests.single_inflight_b2b_sz_two
- single_inflight_b2b_sz_four:
variables:
- MKDV_COCOTB_MODULE: fwnoc_tests.single_inflight_b2b_sz_four
- single_inflight_p2p_sz_zero:
variables:
- MKDV_COCOTB_MODULE: fwnoc_tests.single_inflight_p2p_sz_zero
- single_inflight_p2p_sz_one:
variables:
- MKDV_COCOTB_MODULE: fwnoc_tests.single_inflight_b2b_sz_one
- single_inflight_p2p_sz_two:
variables:
- MKDV_COCOTB_MODULE: fwnoc_tests.single_inflight_b2b_sz_two
- single_inflight_p2p_sz_four:
variables:
- MKDV_COCOTB_MODULE: fwnoc_tests.single_inflight_b2b_sz_four
- two_inflight_mirror_target_sz_zero:
variables:
- MKDV_COCOTB_MODULE: fwnoc_tests.two_inflight_mirror_target_sz_zero
- two_inflight_mirror_target_sz_one:
variables:
- MKDV_COCOTB_MODULE: fwnoc_tests.two_inflight_mirror_target_sz_one
- two_inflight_mirror_target_sz_two:
variables:
- MKDV_COCOTB_MODULE: fwnoc_tests.two_inflight_mirror_target_sz_two
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: ssd pedestrian detector.
input size: 360*640
float ops: 5.9G
task: detection
framework: caffe
prune: '0.97'
version: 2.0
files:
- name: cf_ssdpedestrian_coco_360_640_0.97_5.9G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=cf_ssdpedestrian_coco_360_640_0.97_5.9G_2.0.zip
checksum: f32a8b7891f589a1d0e4af9bb80eae35
- name: ssd_pedestrian_pruned_0_97
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=ssd_pedestrian_pruned_0_97-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: 71f17a0a58894989ee2919b599a30fe4
- name: ssd_pedestrian_pruned_0_97
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=ssd_pedestrian_pruned_0_97-vck190-r2.0.0.tar.gz
checksum: 54d04816be68a44b788bb23c48236fb1
- name: ssd_pedestrian_pruned_0_97
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=ssd_pedestrian_pruned_0_97-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: a36dec9392cf905ca7040ab70ebe106c
- name: ssd_pedestrian_pruned_0_97
type: xmodel
board: vck50008pe-DPUCVDX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=ssd_pedestrian_pruned_0_97-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz
checksum: b21fa5ff5baf7fbc4a1c1ecc3f705261
- name: ssd_pedestrian_pruned_0_97
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=ssd_pedestrian_pruned_0_97-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: 8637450ffb454a96864fd47fe3a9dec6
- name: ssd_pedestrian_pruned_0_97
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=ssd_pedestrian_pruned_0_97-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: 33df413d2a0a52a633a717b828545c6b
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
<filename>vlog_netlist/osc/osc_design/rtl/configure.yml
name: rtl
commands:
- |
mkdir -p outputs
python gen_tcl.py
outputs:
- design.v
- read_design.tcl
|
<filename>documentation/metadata/an002.yaml
---
title: Standard Calling Convention
author:
- Felix "xq" QueiΓner
date: April 28, 2020
abstract: The Standard Calling Convention (<i>SCC</i>) is the default calling convention used for all <i>SPU Mark II</i> code. |
<gh_stars>100-1000
# .github/release.yml
changelog:
exclude:
labels:
- ignore-for-release
categories:
- title: Documentation Changes
labels:
- Component:Doc
- title: RTL Changes
labels:
- Component:RTL
|
<gh_stars>1-10
KerasJson: /media/data/projects/landsat_soil_classifier/models/ANN100x100.json
KerasH5: /media/data/projects/landsat_soil_classifier/models/ANN100x100_weights.h5
InputData: /media/data/projects/landsat_soil_classifier/data/Landsat_x_test.dat
OutputPredictions: /media/data/projects/landsat_soil_classifier/data/Landsat_y_test.dat
OutputDir: /media/data/projects/landsat_soil_classifier/fpga/hls_ANN100x100
ProjectName: fpga_ANN100x100
XilinxPart: xazu7eg-fbvb900-1-i
ClockPeriod: 24
IOType: io_parallel
HLSConfig:
Model:
Precision: ap_fixed<16,10>
ReuseFactor: 100
Strategy: Resource
|
sim.inputs.top_module: "SVM"
sim.inputs.tb_dut: "dut"
sim.inputs.tb_name: "SVM_tb"
sim.inputs.input_files_meta: "append"
sim.inputs.input_files:
- "src/SVM_SRAM/SVM.sv"
- "src/SVM_SRAM/SVM_tb.sv"
- "src/SVM_SRAM/multiply_quantize.sv"
- "src/SVM_SRAM/SVM_memories_214.sv"
sim.inputs:
timescale: "1ns/1ps"
options:
- "-notice"
- "-line"
- "-debug_pp"
- "-debug_all"
- "+v2k"
- "+lint=all,noVCDE"
- "+incdir+../../src/SVM"
- "+define+CLOCK_PERIOD=454"
- "-sverilog"
execute_sim: true
execution_flags: ["+verbose=1"]
|
ariane:
incdirs: [
include,
]
files: [
include/riscv_pkg.sv,
src/debug/dm_pkg.sv,
include/ariane_pkg.sv,
include/std_cache_pkg.sv,
src/util/instruction_tracer_if.sv,
src/util/instruction_tracer_pkg.sv,
src/alu.sv,
src/ariane.sv,
src/branch_unit.sv,
src/cache_ctrl.sv,
src/commit_stage.sv,
src/compressed_decoder.sv,
src/controller.sv,
src/csr_buffer.sv,
src/csr_regfile.sv,
src/decoder.sv,
src/ex_stage.sv,
src/frontend/btb.sv,
src/frontend/bht.sv,
src/frontend/ras.sv,
src/frontend/instr_scan.sv,
src/frontend/frontend.sv,
src/icache.sv,
src/id_stage.sv,
src/instr_realigner.sv,
src/issue_read_operands.sv,
src/issue_stage.sv,
src/lfsr.sv,
src/load_unit.sv,
src/lsu_arbiter.sv,
src/lsu.sv,
src/miss_handler.sv,
src/mmu.sv,
src/mult.sv,
src/nbdcache.sv,
src/vdregs.sv,
src/std_cache_subsystem.sv,
src/sram_wrapper.sv,
src/pcgen_stage.sv,
src/perf_counters.sv,
src/ptw.sv,
src/re_name.sv,
src/scoreboard.sv,
src/store_buffer.sv,
src/store_unit.sv,
src/tlb.sv,
src/debug/dm_csrs.sv,
src/debug/dm_mem.sv,
src/debug/dm_top.sv,
src/debug/dmi_cdc.sv,
src/debug/dmi_jtag.sv,
src/debug/dmi_jtag_tap.sv,
]
riscv_regfile_rtl:
targets: [
rtl,
]
incdirs: [
include,
]
files: [
src/ariane_regfile.sv,
]
riscv_regfile_fpga:
targets: [
xilinx,
]
incdirs: [
include,
]
files: [
src/ariane_regfile_ff.sv,
]
|
name: Kernel-Auto-Release
on:
workflow_dispatch:
inputs:
commit_id:
description: 'Commit ID'
required: true
default: 'HEAD'
version_number:
description: 'Version Number (Ex. 10.4.4)'
required: true
default: '10.4.4'
main_br_version:
description: "Version String for task.h on main branch (leave empty to leave as-is)."
require: false
default: ''
jobs:
release-packager:
name: Release Packager
runs-on: ubuntu-latest
steps:
# Install python 3
- name: Tool Setup
uses: actions/setup-python@v2
with:
python-version: 3.7.10
architecture: x64
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Currently FreeRTOS/.github/scripts houses the release script. Download it for upcoming usage
- name: Checkout FreeRTOS Release Tools
uses: actions/checkout@v2
with:
repository: FreeRTOS/FreeRTOS
path: tools
# Simpler git auth if we use checkout action and forward the repo to release script
- name: Checkout FreeRTOS Kernel
uses: actions/checkout@v2
with:
path: local_kernel
fetch-depth: 0
- name: Release
run: |
# Configure repo for push
git config --global user.name ${{ github.actor }}
git config --global user.email ${{ github.actor }}@users.noreply.github.com
# Install deps and run
pip install -r ./tools/.github/scripts/release-requirements.txt
./tools/.github/scripts/release.py FreeRTOS --kernel-repo-path=local_kernel --kernel-commit=${{ github.event.inputs.commit_id }} --new-kernel-version=${{ github.event.inputs.version_number }} --new-kernel-main-br-version=${{ github.event.inputs.main_br_version }}
exit $?
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
<reponame>WukLab/Clio<gh_stars>10-100
#
# This workflow will compile the code and report compiling errors
#
name: C CI
on: [pull_request]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: prepare
run: sudo apt-get -y install rdma-core libibverbs1 libibverbs-dev
- name: make_host_monitor
run: cd host && make
|
from: latex
to: markdown_github+table_captions+footnotes
standalone: true
atx-headers: true
shift-heading-level-by: 1 # Added title will be level 1
default-image-extension: png
file-scope: true
toc: true
toc-depth: 3 # Actual depth = 2 as Title added post processing
include-in-header: config/frontmatter.md
|
# Copyright 2018 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
dist: trusty
sudo: false
language: cpp
os: linux
cache:
directories:
- $HOME/archive
- $HOME/tools
env: C_COMPILER=gcc-7 CXX_COMPILER=g++-7
addons:
apt:
sources:
- george-edison55-precise-backports
- ubuntu-toolchain-r-test
packages:
- cmake cmake-data
- gcc-7 g++-7
before_script:
- export CC="$C_COMPILER -std=gnu11"
- export CXX="$CXX_COMPILER -std=gnu++11"
- source ./.travis/setup.sh
script:
- mkdir -p build && cd build
- |
cmake \
-DCMAKE_C_COMPILER=$C_COMPILER \
-DCMAKE_CXX_COMPILER=$CXX_COMPILER \
-DLOGIC_WARNINGS_INTO_ERRORS=ON \
..
- cmake --build . --target all -- -j`nproc`
- ctest
matrix:
include:
- env: C_COMPILER=gcc-7 CXX_COMPILER=g++-7
addons:
apt:
sources:
- george-edison55-precise-backports
- ubuntu-toolchain-r-test
packages:
- cmake cmake-data
- g++-7
- env: C_COMPILER=gcc-6 CXX_COMPILER=g++-6
addons:
apt:
sources:
- george-edison55-precise-backports
- ubuntu-toolchain-r-test
packages:
- cmake cmake-data
- g++-6
- env: C_COMPILER=gcc-5 CXX_COMPILER=g++-5
addons:
apt:
sources:
- george-edison55-precise-backports
- ubuntu-toolchain-r-test
packages:
- cmake cmake-data
- g++-5
- env: C_COMPILER=gcc-4.9 CXX_COMPILER=g++-4.9
addons:
apt:
sources:
- george-edison55-precise-backports
- ubuntu-toolchain-r-test
packages:
- cmake cmake-data
- g++-4.9
- env: C_COMPILER=clang-5.0 CXX_COMPILER=clang++-5.0
addons:
apt:
sources:
- george-edison55-precise-backports
- ubuntu-toolchain-r-test
- llvm-toolchain-trusty-5.0
packages:
- cmake cmake-data
- clang-5.0
- env: C_COMPILER=clang-4.0 CXX_COMPILER=clang++-4.0
addons:
apt:
sources:
- george-edison55-precise-backports
- ubuntu-toolchain-r-test
- llvm-toolchain-trusty-4.0
packages:
- cmake cmake-data
- clang-4.0
- env: C_COMPILER=clang-3.9 CXX_COMPILER=clang++-3.9
addons:
apt:
sources:
- george-edison55-precise-backports
- ubuntu-toolchain-r-test
- llvm-toolchain-trusty-3.9
packages:
- cmake cmake-data
- clang-3.9
- env: C_COMPILER=clang-3.8 CXX_COMPILER=clang++-3.8
addons:
apt:
sources:
- george-edison55-precise-backports
- ubuntu-toolchain-r-test
- llvm-toolchain-precise-3.8
packages:
- cmake cmake-data
- clang-3.8
- env: C_COMPILER=clang-3.7 CXX_COMPILER=clang++-3.7
addons:
apt:
sources:
- george-edison55-precise-backports
- ubuntu-toolchain-r-test
- llvm-toolchain-precise-3.7
packages:
- cmake cmake-data
- clang-3.7
- env: C_COMPILER=clang-3.6 CXX_COMPILER=clang++-3.6
addons:
apt:
sources:
- george-edison55-precise-backports
- ubuntu-toolchain-r-test
- llvm-toolchain-precise-3.6
packages:
- cmake cmake-data
- clang-3.6
- env: C_COMPILER=clang-3.5 CXX_COMPILER=clang++-3.5
addons:
apt:
sources:
- george-edison55-precise-backports
- ubuntu-toolchain-r-test
- llvm-toolchain-precise-3.5
packages:
- cmake cmake-data
- clang-3.5
|
<gh_stars>0
package:
name: mgr_soc
author:
- "<NAME> <<EMAIL>>"
export_include_dirs:
- uart
- ibex
sources:
#level 0
- ibex/ibex_pkg.sv
- ibex/addr_map_pkg.sv
- i2c/i2c_package.sv
#level 1
- ibex/ibex_if.sv
- wishbone/wishbone_if.sv
#level 2
#i2c
- i2c/i2c_bit_controller.sv
- i2c/i2c_byte_controller.sv
- i2c/i2c_master_top.sv
#ram
- ram/1p_ram.sv
- ram/2p_ram.sv
#gpio
- gpio/gpio.sv
#spi
- spi/fifo.sv
- spi/spi_master.sv
- spi/spi_slave.sv
#uart
#- uart/uart_receiver.v
#- uart/uart_regs.v
#- uart/uart_rfifo.v
#- uart/uart_sync_flops.v
#- uart/uart_tfifo.v
#- uart/uart_top.v
#- uart/uart_transmitter.v
#- uart/uart_wb.v
#- uart/raminfr.v
#uart_moj
- uart_moj/wb_uart.sv
- uart_moj/uart.sv
- uart_moj/uart_rx.sv
- uart_moj/uart_tx.sv
#wishbone
- wishbone/ibex_to_wb.sv
- ibex/ibex_wb.sv
- wishbone/slave_to_wb.sv
- wishbone/wb_1p_ram.sv
- wishbone/wb_2p_ram.sv
- wishbone/wb_i2c_master.sv
- wishbone/wb_led.sv
- wishbone/wb_spi_master.sv
- wishbone/wb_timer.sv
- wishbone/wb_uart.sv
- wishbone/wishbone_sharedbus.sv
# ibex
- ibex/ibex_alu.sv
- ibex/ibex_compressed_decoder.sv
- ibex/ibex_controller.sv
- ibex/ibex_core.sv
- ibex/ibex_counters.sv
- ibex/ibex_cs_registers.sv
- ibex/ibex_decoder.sv
- ibex/ibex_ex_block.sv
- ibex/ibex_fetch_fifo.sv
- ibex/ibex_icache.sv
- ibex/ibex_id_stage.sv
- ibex/ibex_if_stage.sv
- ibex/ibex_load_store_unit.sv
- ibex/ibex_multdiv_fast.sv
- ibex/ibex_multdiv_slow.sv
- ibex/ibex_pmp.sv
- ibex/ibex_prefetch_buffer.sv
- ibex/ibex_register_file_ff.sv
- ibex/ibex_wb_stage.sv
- ibex/prim_assert.sv
- timer/timer.sv
- xilinx/clkgen_xil7series.sv
- xilinx/prim_clock_gating.sv
- soc/ibex_soc.sv
- target: test_blink
files:
- dv/top.sv
- target: test_gpio
files:
- dv/top.sv
- target: test_uart
files:
- dv/uart/tb_uart.sv
- dv/uart/uart_seq_item.sv
- dv/uart/uart_seq.sv
- dv/uart/uart_driver.sv
- dv/uart/uart_mon.sv
- dv/uart/uart_cov.sv
- dv/uart/uart_agent.sv
- dv/uart/uart_env.sv
- dv/uart/uart_intf.sv
- dv/uart/uart_test.sv |
hier-icache:
incdirs: [
../../rtl/includes,
]
files: [
RTL/TOP/icache_hier_top.sv,
RTL/L1_CACHE/pri_icache_controller.sv,
RTL/L1_CACHE/refill_arbiter.sv,
RTL/L1_CACHE/pri_icache.sv,
RTL/L1_CACHE/register_file_1w_multi_port_read.sv,
RTL/L1_CACHE/register_file_1w_multi_port_read_test_wrap.sv,
RTL/L1.5_CACHE/AXI4_REFILL_Resp_Deserializer.sv,
RTL/L1.5_CACHE/share_icache.sv,
RTL/L1.5_CACHE/icache_controller.sv,
RTL/L1.5_CACHE/RefillTracker_4.sv,
RTL/L1.5_CACHE/REP_buffer_4.sv,
RTL/L1.5_CACHE/ram_ws_rs_data_scm.sv,
RTL/L1.5_CACHE/ram_ws_rs_tag_scm.sv,
CTRL_UNIT/hier_icache_ctrl_unit.sv,
CTRL_UNIT/hier_icache_ctrl_unit_wrap.sv,
]
|
<filename>ips/apb/apb_uart/src_files.yml
apb_uart:
files: [
apb_uart.vhd,
slib_clock_div.vhd,
slib_counter.vhd,
slib_edge_detect.vhd,
slib_fifo.vhd,
slib_input_filter.vhd,
slib_input_sync.vhd,
slib_mv_filter.vhd,
uart_baudgen.vhd,
uart_interrupt.vhd,
uart_receiver.vhd,
uart_transmitter.vhd,
]
|
################################################################
#
# Copyright (c) #YEAR# #LICENSOR#. All rights reserved.
#
# The information and source code contained herein is the
# property of #LICENSOR#, and may not be disclosed or
# reproduced in whole or in part without explicit written
# authorization from #LICENSOR#.
#
# * Filename : amplifier.ifc.yaml
# * Author : <NAME> (<EMAIL>)
# * Description: SV template for an amplifier cell
#
# * Note :
# -
#
# * Todo :
# -
#
# * Revision :
# - 00/00/00 :
#
################################################################
module_name: amplifier
description: Interface for generic amplifier
pin:
vdd:
name: vdd
description: power supply
direction: input
datatype: pwl
vss:
name: vss
description: ground
direction: input
datatype: pwl
is_optional: True
inp:
name: inp
description: positive input
direction: input
datatype: pwl
inn:
name: inn
description: negative input
direction: input
datatype: pwl
#is_optional: True
outp:
name: outp
description: positive output
direction: output
datatype: pwl
outn:
name: outn
description: negative output
direction: output
datatype: pwl
#is_optional: True
metric: # optional behaviors to be incorporated in a model
compression:
description: Gain compression behavior
filter:
description: Implement filter for dynamic behavior
value: # user will choose one in the list
- p1
- p2
- p2z1
modelparam:
v_os:
description: input-referred static offset voltage
datatype: real
value: 0.0
etol_v_icm:
description: resolution of input common-mode voltage (inp+inn)/2.0
datatype: real
value: 0.01
etol_vdd:
description: resolution of vdd
datatype: real
value: 0.01
etol_f:
description: error tolerance of a filter
datatype: real
value: 0.0001
testparam:
f_bin:
description: frequency bin in doing fft for test1
value: 10e6
f_max:
description: max. frequency of interest for test1
value: 10e9
|
<filename>.github/workflows/test.yml
name: 'test'
on:
push:
pull_request:
schedule:
- cron: '0 0 * * 5'
workflow_dispatch:
jobs:
fomu-toolchain:
strategy:
fail-fast: false
max-parallel: 3
matrix:
include:
- { icon: π§, os: Ubuntu }
- { icon: π§, os: Windows }
- { icon: π, os: macOS }
runs-on: ${{ matrix.os }}-latest
name: '${{ matrix.icon}} ${{ matrix.os }} | fomu-toolchain'
defaults:
run:
shell: bash
steps:
- run: git config --global core.autocrlf input
shell: bash
- uses: actions/checkout@v2
with:
submodules: true
fetch-depth: 0
- uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install Scala
uses: olafurpg/setup-scala@v10
with:
java-version: openjdk@1.11
- name: Install (Ubuntu)
if: matrix.os == 'ubuntu'
run: |
curl -L https://ziglang.org/download/0.8.0/zig-linux-x86_64-0.8.0.tar.xz | tar -xJf -
echo "$(pwd)/zig-linux-x86_64-0.8.0" >> $GITHUB_PATH
- name: Install (Mac OS)
if: matrix.os == 'macos'
run: brew install zig
- name: Install (Windows)
if: matrix.os == 'windows'
run: |
choco install zig --version 0.8.0
ln -s $(which python) /usr/bin/python3
- run: python ./get-toolchain.py
env:
GH_TOKEN: ${{ github.token }}
- run: ./.github/tests.sh
all-in-one:
name: 'π³οΈ Container | All-in-one'
runs-on: ubuntu-latest
env:
GHDL_PLUGIN_MODULE: ghdl
steps:
- uses: actions/checkout@v2
with:
submodules: true
fetch-depth: 0
- uses: docker://ghcr.io/hdl/debian-buster/impl
with:
args: ./.github/hdl-tests.sh
fine-grained:
name: 'π³οΈ Container | Fine-grained'
runs-on: ubuntu-latest
env:
GHDL_PLUGIN_MODULE: ghdl
CONTAINER_ENGINE: docker
steps:
- uses: actions/checkout@v2
with:
submodules: true
fetch-depth: 0
- name: Pull container images
run: |
docker pull hdlc/ghdl:yosys
docker pull hdlc/nextpnr:ice40
docker pull hdlc/icestorm
- run: ./.github/hdl-tests.sh
msys2:
runs-on: windows-latest
strategy:
fail-fast: false
max-parallel: 2
matrix:
include: [
{icon: 'πͺ', installs: 'MINGW32', arch: i686 },
{icon: 'π¦', installs: 'MINGW64', arch: x86_64 },
]
name: '${{ matrix.icon }} MSYS2 | ${{ matrix.installs }}'
defaults:
run:
shell: msys2 {0}
steps:
- name: '${{ matrix.icon }} Setup MSYS2'
uses: msys2/setup-msys2@v2
with:
msystem: ${{ matrix.installs }}
update: true
install: >
make
mingw-w64-${{ matrix.arch }}-icestorm
mingw-w64-${{ matrix.arch }}-yosys
mingw-w64-${{ matrix.arch }}-nextpnr
- run: git config --global core.autocrlf input
shell: bash
- uses: actions/checkout@v2
with:
submodules: true
fetch-depth: 0
- run: ./.github/hdl-tests.sh
|
<reponame>Thales-RISC-V/fpnew
package:
name: FPnew
authors: ["<NAME> <<EMAIL>>"]
dependencies:
common_cells: {git: "https://github.com/pulp-platform/common_cells.git", rev: 32dd115}
fpu_div_sqrt_mvp: {git: "https://github.com/pulp-platform/fpu_div_sqrt_mvp.git", rev: e10ab86}
sources:
- src/fpnew_pkg.sv
- src/fpnew_cast_multi.sv
- src/fpnew_classifier.sv
- src/fpnew_divsqrt_multi.sv
- src/fpnew_fma.sv
- src/fpnew_fma_multi.sv
- src/fpnew_noncomp.sv
- src/fpnew_opgroup_block.sv
- src/fpnew_opgroup_fmt_slice.sv
- src/fpnew_opgroup_multifmt_slice.sv
- src/fpnew_pipe_in.sv
- src/fpnew_pipe_out.sv
- src/fpnew_pipe_inside_fma.sv
- src/fpnew_pipe_inside_cast.sv
- src/fpnew_rounding.sv
- src/fpnew_top.sv
|
# Copyright 2020 ETH Zurich and University of Bologna.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
package:
name: snitch_vm
authors:
- <NAME> <<EMAIL>>
- <NAME> <<EMAIL>>
dependencies:
common_cells: {path: ../../vendor/pulp_platform_common_cells}
snitch: {path: ../../ip/snitch}
sources:
# Level 0:
- src/snitch_ptw.sv
# - target: test
# files:
# - test/snitch_ptw_tb.sv
|
<reponame>the-moog/hdlConvertor<filename>.circleci/config.yml
version: 2.1
orbs:
python: circleci/python@0.2.1
py: nic30/python-all-in-1@0.2.47
jobs:
install-test-coverage:
# executor: python/default
resource_class: medium
docker:
- image: ubuntu:rolling
steps:
- checkout
- run:
command: |
apt update
# because of tzdata
export DEBIAN_FRONTEND=noninteractive
export TZ=America/USA
apt update
apt install -y build-essential libantlr4-runtime-dev antlr4 cmake ninja-build python3-pip python3-dev python3 python3-pip git lcov rubygems
# because of coverage (tool named coverage)
echo 'export PATH=$PATH:/root/.local/bin' >> $BASH_ENV
update-alternatives --install /usr/bin/python python /usr/bin/python3 1
source $BASH_ENV
- run:
command: |
update-alternatives --install /usr/bin/python python /usr/bin/python3 10;
update-alternatives --install /usr/bin/pip pip /usr/bin/pip3 10;
- run: git submodule sync
- run: git submodule update --init
# - python/load-cache
- run:
command: |
gcc -v;
python --version;
cmake --version;
free;
- run: pip install jupyter
- run: gem install coveralls-lcov
#- run:
# command: |
# echo export CXXFLAGS=\"$CXXFLAGS --param ggc-min-expand=1 --param ggc-min-heapsize=32768\" >> "$BASH_ENV"
- py/install-setup-py:
rm-package-dir: true
build-args: "--user -j1 --build-type Debug -- -DCODE_COVERAGE=ON" # because we do not have enough RAM
# - python/save-cache
- py/test-and-coverage:
coverage: false
- run:
name: Filter lcov outputs for scikit-build and upload it to coveralls.io
command: |
if [ -d "removed_because_of_coverage_$MODULE_NAME" ]; then
mv "removed_because_of_coverage_$MODULE_NAME" $MODULE_NAME;
fi
if [ -z "$CIRCLE_PULL_REQUEST" ]; then
lcov --compat-libtool --directory . --capture --output-file coverage.info.raw;
lcov --remove coverage.info.raw -o coverage.info '/usr/*' '*_skbuild/*';
sed -i 's/\/root\/project\///g' coverage.info;
sed -i 's/\/root\///g' coverage.info;
coveralls-lcov coverage.info --repo-token ${COVERALLS_REPO_TOKEN};
else
echo "[INFO] Skipping the coverage upload because this is a pull request $CIRCLE_PULL_REQUEST"
fi
install-test-deploy:
# executor: python/default
resource_class: medium
docker:
- image: ubuntu:rolling
steps:
- checkout: {}
- run:
command: |
apt update
# because of tzdata
export DEBIAN_FRONTEND=noninteractive
            export TZ=Etc/UTC  # 'America/USA' is not a valid tz database name
apt update
            apt install -y build-essential libantlr4-runtime-dev antlr4 cmake ninja-build python3-pip python3-dev python3 git lcov rubygems
# because of coverage (tool named coverage)
echo 'export PATH=$PATH:/root/.local/bin' >> $BASH_ENV
update-alternatives --install /usr/bin/python python /usr/bin/python3 1
source $BASH_ENV
- run:
command: |
update-alternatives --install /usr/bin/python python /usr/bin/python3 10;
update-alternatives --install /usr/bin/pip pip /usr/bin/pip3 10;
- run: git submodule sync
- run: git submodule update --init
# - python/load-cache
- run:
command: |
gcc -v;
python --version;
cmake --version;
free;
- run: pip install jupyter
- run: gem install coveralls-lcov
#- run:
# command: |
# echo export CXXFLAGS=\"$CXXFLAGS --param ggc-min-expand=1 --param ggc-min-heapsize=32768\" >> "$BASH_ENV"
- py/install-setup-py:
rm-package-dir: true
build-args: "--user -j1" # because we do not have enough RAM
# - python/save-cache
- py/test-and-coverage:
coverage: false
- py/deploy-pypi-on-tag:
bin: false
workflows:
main:
jobs:
- install-test-coverage:
filters:
tags:
only: /.*/
- install-test-deploy:
requires:
- install-test-coverage
context:
- pypi
filters:
tags:
only: /.*/
|
<filename>Lib/site-packages/wx-2.8-msw-unicode/wx/tools/Editra/tests/syntax/yaml.yaml
# Syntax Highlighting Test file for YAML
# Some Comments about this file
---!myDocument
logEvent: Hello World
date: 2007-08-06
customer:
given: Joe
family: Bazooka
greet: &id001
location: |
In Front of you
greeting:
- first: Hello
second: World
- first: Buy my gum
second: I give you Comics
specialGreeting: >
This is a text section of a yaml document all
the (xxx:) items are identifiers.
...
|
<filename>pythondata_cpu_cv32e41p/system_verilog/src_files.yml
# Copyright 2020 ETH Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
cv32e41p_regfile_rtl:
targets: [
rtl,
tsmc55,
gf22,
]
incdirs: [
./rtl/include,
]
files: [
./rtl/cv32e41p_register_file_ff.sv,
]
cv32e41p:
incdirs: [
./rtl/include,
../../rtl/includes,
]
files: [
./rtl/include/cv32e41p_apu_core_pkg.sv,
./rtl/include/cv32e41p_fpu_pkg.sv,
./rtl/include/cv32e41p_pkg.sv,
./bhv/include/cv32e41p_tracer_pkg.sv,
./rtl/cv32e41p_alu.sv,
./rtl/cv32e41p_alu_div.sv,
./rtl/cv32e41p_ff_one.sv,
./rtl/cv32e41p_popcnt.sv,
./rtl/cv32e41p_compressed_decoder.sv,
./rtl/cv32e41p_controller.sv,
./rtl/cv32e41p_cs_registers.sv,
./rtl/cv32e41p_decoder.sv,
./rtl/cv32e41p_int_controller.sv,
./rtl/cv32e41p_ex_stage.sv,
./rtl/cv32e41p_hwloop_regs.sv,
./rtl/cv32e41p_id_stage.sv,
./rtl/cv32e41p_if_stage.sv,
./rtl/cv32e41p_load_store_unit.sv,
./rtl/cv32e41p_mult.sv,
./rtl/cv32e41p_prefetch_buffer.sv,
./rtl/cv32e41p_prefetch_controller.sv,
./rtl/cv32e41p_obi_interface.sv,
./rtl/cv32e41p_aligner.sv,
./rtl/cv32e41p_sleep_unit.sv,
./rtl/cv32e41p_core.sv,
./rtl/cv32e41p_apu_disp.sv,
./rtl/cv32e41p_fifo.sv
]
cv32e41p_vip_rtl:
targets: [
rtl,
]
incdirs: [
./rtl/include,
]
files: [
./bhv/cv32e41p_sim_clock_gate.sv,
./bhv/cv32e41p_wrapper.sv,
./bhv/cv32e41p_tracer.sv,
./bhv/cv32e41p_core_log.sv,
./bhv/cv32e41p_apu_tracer.sv,
]
defines: [
CV32E40P_TRACE_EXECUTION,
CV32E40P_APU_TRACE
]
flags: [
skip_synthesis,
]
# NOTE(review): duplicate top-level key 'cv32e41p_regfile_rtl' (also defined
# above with cv32e41p_register_file_ff.sv). Most YAML loaders keep only the
# last occurrence, silently dropping the flip-flop variant — confirm which
# register-file implementation the 'rtl' target is meant to use.
cv32e41p_regfile_rtl:
targets: [
rtl,
tsmc55,
gf22,
]
incdirs: [
./rtl/include,
]
files: [
./rtl/cv32e41p_register_file_latch.sv,
]
cv32e41p_regfile_verilator:
targets: [
verilator,
]
files: [
./rtl/cv32e41p_register_file_ff.sv,
]
cv32e41p_regfile_fpga:
targets: [
xilinx,
]
incdirs: [
./rtl/include,
]
files: [
./rtl/cv32e41p_register_file_ff.sv,
]
|
<gh_stars>10-100
name: CI
on:
# Triggers the workflow on push or pull request events but only for the master branch
push:
branches: [ master ]
pull_request:
branches: [ master ]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
Deploy:
runs-on: ubuntu-latest
if: "contains(github.event.head_commit.message, '*deploy*')"
steps:
- name: Maximize build space
uses: easimon/maximize-build-space@master
with:
root-reserve-mb: 20480
swap-size-mb: 8192
remove-dotnet: 'true'
- name: Checkout Code
uses: actions/checkout@v2
- name: Login to GitHub Container Registry
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}  # NOTE(review): original value was redacted ('<PASSWORD>'); GITHUB_TOKEN is the standard credential for ghcr.io — confirm
- name: Build and Push Docker Image
uses: docker/build-push-action@v2
with:
context: .
file: utils/container/Dockerfile
        push: true # without 'push: true' the action only builds the image and does not push it
tags: |
ghcr.io/spcl/pspin:latest
|
<gh_stars>1-10
cryoAsicGen1:
enable: True
ForceWrite: False
EpixHRGen1Cryo:
enable: True
CryoAsic0:
enable: True
TPS_DAC: 0x3c
TPS_GR: 0x1
TPSMux: 0x0
Bias_TPS_Buffer: 0x5
Bias_TPS: 0x4
Bias_TPS_DAC: 0x4
Bias_LVDS_Rx: 0x0
Bias_LVDS_Tx: 0x0
RbiasEn: False
Pulser: 0x370
test: True
atest: False
hrtest: False
sbatest: False
pbit: False
Pulser_Reset: False
PPbit: True
test_BE: False
DelEXEC: False
DelCCKreg: False
sync_exten: False
sync_role_sel: True
RO_Bk0_disable: False
RO_Bk1_disable: False
DM1en: False
DM2en: False
Pulser_Monost: 0x0
cs_LVDS_Tx: 0x0
DCycle_en: False
DCycle_bypass: False
DCycle_polarity: False
DCycle_DAC: 0x20
Bias_DCycle_DAC: 0x4
PLL_RO_OutDivider: 0x0
PLL_DCycle_Bypass_B0: 0x3
PLL_RO_Reset: True
PLL_RO_Itune: 0x3
PLL_RO_KVCO: 0x1
PLL_RO_filter1: 0x5
PLL_RO_filter2: 0x4
Dcycle_DAC_gain: 0x3
VTBias_B0: 0x2
VTBias_T0: 0xe
SAH_B0: 0x0
SAH_VcmBuf_B0: 0x0
SigBuf_B0: 0x0
ADC_VrefBuf_B0: 0x0
ADC_B0: 0x3
ADC_VcmBuf_B0: 0x0
bamp: 0x4
bleak: 0x2
brstVref: 0x4
SAH_Ctrl_Visel: False
ADC_Ocen_Bk0: True
ADC_Ocen_Bk1: True
VrefBuffExt_En_Bk0: True
VrefBuffExt_En_Bk1: True
ROsLVDS_bit: True
SACIsLVDS_bit: True
emph_bc: 0x0
emph_bd: 0x0
DM1sel: 0x0
DM2sel: 0x0
SubBnkEn: 0xFFFF
LDO_VTBias_B0: 0x3
LDO_VTBias_T0: 0xa
LDO_VTBias_Br0: 0x0
LDO_VTBias_Tr0: 0x5
TPS_DAC_Gain: 0x3
LDO0TB_En: True
LDO0rTB_En: True
LDO1TB_En: True
LDO2TB_En: True
LDO46_En: True
encoder_mode_dft: 0x0
En_BankClk_Bk0_LVDS: False
En_BankClk_Bk1_LVDS: False
En_SerClk_out_Bk0_LVDS: True
En_SerClk_out_Bk1_LVDS: True
rtrimLVDS_b0: 0x2
SACItristateLVDS_bit: False
VrefGen_B0_1v2: 0x3
VrefGen_Br0_1v2: 0x3
VrefGen_T0_1v2: 0xf
VrefGen_Tc0_1v2: 0xf
VrefGen_Tr0_1v2: 0x0
VrefBuf_Ext_B0_1v2: 0x3
VcmBuf_Ext_B0_1v2: 0x0
FE_Amp_B0_1v2: 0x2
bifb: 0x3
bbaseref: 0x3
blcoarse: 0x6
ctrl_pulser: False
Pulser_Bias_DAC_b0: 0x4
PP_Pulser_Bias_DAC: True
Pulser_Bias_Monost_b0: 0x3
ensdps: False
enrefps: False
Ana_Mon_Cal: False
ADC_B3B2: 0x2
CRYO_ID: 0x0
AppFpgaRegisters:
enable: True
Version: 0x2
GlblRstPolarity: True
GlblRstDelay: 0
GlblRstWidth: 0
AcqPolarity: False
AcqDelay1: 1000
AcqWidth1: 100000
AcqDelay2: 0
AcqWidth2: 0
TPulsePolarity: False
TPulseDelay: 0
TPulseWidth: 0
StartPolarity: False
StartDelay: 0
StartWidth: 0
PPbePolarity: False
PPbeDelay: 0
PPbeWidth: 0
SampClkEn: False
SyncPolarity: True
SyncDelay: 0
SyncWidth: 0
SaciSyncPolarity: False
SaciSyncDelay: 0
SaciSyncWidth: 0
SR0Polarity: False
Vid: 0
ResetCounters: False
AsicPwrEnable: True
AsicPwrManual: False
AsicPwrManualDig: False
AsicPwrManualAna: False
AsicPwrManualIo: False
AsicPwrManualFpga: False
DebugSel1: 9
DebugSel2: 3
StartupReq: True
|
<filename>.github/workflows/vs.yml
name: Visual Studio Build
on: [push, pull_request]
jobs:
yosys-vcxsrc:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Cache sources
id: cache-sources
uses: actions/cache@v2
with:
path: .
key: cache-yosys
- name: Build
run: make vcxsrc YOSYS_VER=latest
- uses: actions/upload-artifact@v2
with:
name: vcxsrc
path: yosys-win32-vcxsrc-latest.zip
build:
runs-on: windows-2019
needs: yosys-vcxsrc
steps:
- uses: actions/download-artifact@v2
with:
name: vcxsrc
path: .
- name: unzip
run: unzip yosys-win32-vcxsrc-latest.zip
- name: setup-msbuild
uses: microsoft/setup-msbuild@v1
- name: MSBuild
working-directory: yosys-win32-vcxsrc-latest
run: msbuild YosysVS.sln /p:PlatformToolset=v142 /p:Configuration=Release /p:WindowsTargetPlatformVersion=10.0.17763.0
|
SA_BB:
type: Path
SA_OS:
type: Path
SA_RA:
type: Path
SA_CT:
type: Path
BB_LA:
type: Path
parameters:
cell1_conduction_time: 0.03
cell2_conduction_time: 0.03
LA_LA1:
type: Path
parameters:
cell1_conduction_time: 0.04
cell2_conduction_time: 0.04
RA_RA1:
type: Path
RA1_CS:
type: Path
CT_CT1:
type: Path
OS_Fast:
type: Path
Fast_Fast1:
type: Path
Fast1_AV:
type: Path
parameters:
cell1_conduction_time: 0.01
cell2_conduction_time: 0.01
OS_Slow:
type: Path
parameters:
cell1_conduction_time: 0.03
cell2_conduction_time: 0.03
Slow_Slow1:
type: Path
parameters:
cell1_conduction_time: 0.03
cell2_conduction_time: 0.03
Slow1_AV:
type: Path
parameters:
cell1_conduction_time: 0.015
cell2_conduction_time: 0.015
AV_His:
type: Path
parameters:
cell1_conduction_time: 0.03
cell2_conduction_time: 0.03
His_His1:
type: Path
His1_His2:
type: Path
His2_LBB:
type: Path
LBB_LBB1:
type: Path
parameters:
cell1_conduction_time: 0.005
cell2_conduction_time: 0.005
LBB1_LVA:
type: Path
parameters:
cell1_conduction_time: 0.005
cell2_conduction_time: 0.005
His2_RBB:
type: Path
RBB_RBB1:
type: Path
parameters:
cell1_conduction_time: 0.005
cell2_conduction_time: 0.005
RBB1_RVA:
type: Path
parameters:
cell1_conduction_time: 0.005
cell2_conduction_time: 0.005
LVA_RVA:
type: Path
parameters:
cell1_conduction_time: 0.005
cell2_conduction_time: 0.005
LVA_LV:
type: Path
LV_LV1:
type: Path
parameters:
cell1_conduction_time: 0.03
cell2_conduction_time: 0.03
LVA_LVS:
type: Path
parameters:
cell1_conduction_time: 0.015
cell2_conduction_time: 0.015
LVS_LVS1:
type: Path
parameters:
cell1_conduction_time: 0.015
cell2_conduction_time: 0.015
LVS1_CSLV:
type: Path
RVA_RV:
type: Path
parameters:
cell1_conduction_time: 0.01
cell2_conduction_time: 0.01
RV_RV1:
type: Path
RVA_RVS:
type: Path
parameters:
cell1_conduction_time: 0.015
cell2_conduction_time: 0.015
RVS_RVS1:
type: Path
|
name: Tile_MemCore
commands:
- bash get_Tile_MemCore_outputs.sh
inputs:
- design.v
outputs:
- Tile_MemCore_tt.lib
- Tile_MemCore_tt.db
- Tile_MemCore.lef
- Tile_MemCore.gds
- Tile_MemCore.vcs.v
- Tile_MemCore.vcs.pg.v
- Tile_MemCore.lvs.v
- Tile_MemCore.sdf
- Tile_MemCore.pt.sdc
- Tile_MemCore.spef.gz
- sram.spi
- sram.v
- sram-pwr.v
- sram_tt.db
postconditions:
- assert File( 'outputs/Tile_MemCore_tt.lib' ) # must exist
- assert File( 'outputs/Tile_MemCore_tt.db' ) # must exist
- assert File( 'outputs/Tile_MemCore.lef' ) # must exist
- assert File( 'outputs/Tile_MemCore.gds' ) # must exist
- assert File( 'outputs/Tile_MemCore.vcs.v' ) # must exist
- assert File( 'outputs/Tile_MemCore.vcs.pg.v' ) # must exist
- assert File( 'outputs/Tile_MemCore.pt.sdc' ) # must exist
- assert File( 'outputs/Tile_MemCore.spef.gz' ) # must exist
- assert File( 'outputs/Tile_MemCore.lvs.v' ) # must exist
- assert File( 'outputs/Tile_MemCore.sdf' ) # must exist
- assert File( 'outputs/sram.spi' ) # must exist
- assert File( 'outputs/sram.v' ) # must exist
- assert File( 'outputs/sram-pwr.v' ) # must exist
- assert File( 'outputs/sram_tt.db' ) # must exist
|
<filename>docs/_data/navigation.yml<gh_stars>10-100
docs_list_title: navigation
docs:
- title: Home
url: index.html
- title: Getting Started
url: getting_started.html
- title: Board Overview
url: overview.html
- title: Accessories
url: accessories.html
- title: Overlays
url: overlays.html
- title: Educational Resources
url: educational_resources.html
- title: Tutorial
url: tutorial.html
- title: Support
url: support.html
- title: FAQs
url: faqs.html
|
<gh_stars>0
mchan:
incdirs: [
./rtl/include,
]
files: [
./rtl/misc/mchan_arbiter.sv,
./rtl/misc/mchan_arb_primitive.sv,
./rtl/misc/mchan_rr_flag_req.sv,
./rtl/ctrl_unit/ctrl_fsm.sv,
./rtl/ctrl_unit/ctrl_if.sv,
./rtl/ctrl_unit/ctrl_unit.sv,
./rtl/ctrl_unit/synch_unit.sv,
./rtl/ctrl_unit/trans_allocator.sv,
./rtl/ctrl_unit/trans_queue.sv,
./rtl/ctrl_unit/trans_arbiter_wrap.sv,
./rtl/ctrl_unit/trans_unpack.sv,
./rtl/ctrl_unit/twd_trans_queue.sv,
./rtl/ctrl_unit/twd_trans_splitter.sv,
./rtl/ext_unit/ext_ar_buffer.sv,
./rtl/ext_unit/ext_aw_buffer.sv,
./rtl/ext_unit/ext_b_buffer.sv,
./rtl/ext_unit/ext_buffer.sv,
./rtl/ext_unit/ext_opc_buf.sv,
./rtl/ext_unit/ext_r_buffer.sv,
./rtl/ext_unit/ext_rx_if.sv,
./rtl/ext_unit/ext_tid_gen.sv,
./rtl/ext_unit/ext_tx_if.sv,
./rtl/ext_unit/ext_unit.sv,
./rtl/ext_unit/ext_w_buffer.sv,
./rtl/tcdm_unit/tcdm_cmd_unpack.sv,
./rtl/tcdm_unit/tcdm_rx_if.sv,
./rtl/tcdm_unit/tcdm_synch.sv,
./rtl/tcdm_unit/tcdm_tx_if.sv,
./rtl/tcdm_unit/tcdm_unit.sv,
./rtl/trans_unit/trans_aligner.sv,
./rtl/trans_unit/trans_buffers.sv,
./rtl/trans_unit/trans_unit.sv,
./rtl/top/mchan.sv,
]
jg_slint_top_name: [
mchan
]
jg_slint_elab_opt: [
]
jg_slint_postelab_cmds: [
]
jg_slint_clocks: [
clk_i,
]
jg_slint_resets: [
~rst_ni,
]
|
module: test_stm
sources:
- ../../../interfaces/common/dii_channel.sv
- ../../../blocks/regaccess/common/osd_regaccess.sv
- ../../../blocks/regaccess/common/osd_regaccess_demux.sv
- ../../../blocks/regaccess/common/osd_regaccess_layer.sv
- ../../../interconnect/common/ring_router_mux.sv
- ../../../blocks/timestamp/common/osd_timestamp.sv
- ../../../blocks/buffer/common/osd_fifo.sv
- ../../../blocks/eventpacket/common/osd_event_packetization.sv
- ../../../blocks/eventpacket/common/osd_event_packetization_fixedwidth.sv
- ../../../blocks/tracesample/common/osd_tracesample.sv
- ../common/osd_stm.sv
toplevel: osd_stm
simulators:
- vcs
parameters:
VALWIDTH: 32
REG_ADDR_WIDTH: 5
MAX_PKT_LEN: 12
|
<filename>ivpm.yaml
package:
name: pybfms-spi
version: 0.0.1
deps:
- name: pybfms
src: pypi
- name: cocotb
src: pypi
dev-deps:
- name: pybfms
url: https://github.com/pybfms/pybfms.git
- name: cocotb
src: pypi
|
<reponame>antmicro/verible-coverage<gh_stars>1-10
image:
name: ubuntu:18.04
variables:
GIT_STRATEGY: clone
before_script:
- apt-get update -qq
- apt-get install -y wget git build-essential
- wget https://github.com/bazelbuild/bazel/releases/download/1.1.0/bazel_1.1.0-linux-x86_64.deb
- apt install -y ./bazel_1.1.0-linux-x86_64.deb
- git clone https://github.com/google/verible.git verible
- cd verible && bazel build --noshow_progress --cxxopt='-std=c++17' //...
- cd -
build:
script:
- printf "%s\n%s\n\n" "Verible commit:" "$(git --git-dir=verible/.git log -1 --pretty=oneline)"
- ./test_loop.sh
when: manual
|
package:
name: generic_sram_bfms
version: 0.0.1
deps:
- name: cocotb
type: python
src: pypi
- name: vlsim
url: http://github.com/mballance/vlsim.git
type: python
- name: pybfms
type: python
src: pypi
dev-deps:
- name: cocotb
url: https://github.com/cocotb/cocotb.git
type: python
- name: vlsim
url: http://github.com/mballance/vlsim.git
type: python
- name: pybfms
url: https://github.com/pybfms/pybfms.git
type: python
|
<gh_stars>100-1000
# Copyright cocotb contributors
# Licensed under the Revised BSD License, see LICENSE for details.
# SPDX-License-Identifier: BSD-3-Clause
exclude: "^cocotb/_vendor"
repos:
- repo: "https://github.com/psf/black"
rev: "22.3.0"
hooks:
- id: "black"
args:
- "."
- repo: "https://github.com/pycqa/isort"
rev: "5.10.1"
hooks:
- id: "isort"
args:
- "--profile=black"
- "."
- repo: "https://github.com/pycqa/flake8"
rev: "3.9.2"
hooks:
- id: "flake8"
- repo: "https://github.com/pre-commit/mirrors-clang-format"
rev: "v13.0.0"
hooks:
- id: "clang-format"
exclude: "^cocotb/share/include/(sv_vpi|vhpi|vpi)_user(_ext)?.h"
- repo: "https://github.com/pre-commit/pre-commit-hooks"
rev: "v4.1.0"
hooks:
- id: "trailing-whitespace"
- id: "mixed-line-ending"
args:
- "--fix=lf"
- id: "end-of-file-fixer"
exclude: "^documentation/source/diagrams/(svg|xml)/"
|
<reponame>mfkiwl/garnet
# Build symlinks necessary to do "make postroute_hold"
# using pre-cached collateral from dir $GOLD
agents: { queue: "papers" }
env:
BUILD : /build/prh${BUILDKITE_BUILD_NUMBER}/full_chip
GOLD : /sim/buildkite-agent/gold
steps:
- label: 'hold'
commands:
- echo "--- POSTROUTE_HOLD"
- 'set -o pipefail;
source mflowgen/bin/setup-buildkite.sh --dir $$BUILD --need_space 1G;
mflowgen run --design $$GARNET_HOME/mflowgen/full_chip;
$$GARNET_HOME/mflowgen/bin/get-step-context.sh $$GOLD \
cadence-innovus-postroute_hold'
# ready to do: make cadence-innovus-postroute_hold
|
#
# Derived from
# https://github.com/steveno/ctags/blob/master/.travis.yml
#
language: c
os:
- linux
- osx
compiler:
- gcc
- clang
# NOTE(review): merged the duplicate top-level 'env:' and 'addons:' keys —
# YAML parsers keep only the last occurrence of a key, which silently dropped
# the coverity_scan addon and the encrypted global variable. Also closed the
# unterminated quote on the 'secure:' value.
env:
  global:
    # The next declaration is the encrypted COVERITY_SCAN_TOKEN, created
    # via the "travis encrypt" command using the project repo's public key
    - secure: "<KEY>"
  matrix:
    - TARGET=Unix
    - TARGET=Mingw32
addons:
  coverity_scan:
    project:
      name: "fishman/ctags"
      description: "Build submitted via Travis CI"
    notification_email: <EMAIL>
    build_command_prepend: "./autogen.sh ; ./configure ; make clean"
    build_command: "make -j 4 CFLAGS=-O0"
    branch_pattern: master
  apt:
    packages:
      - lcov
      - binutils-mingw-w64-i686
      - gcc-mingw-w64-i686
      - libjansson-dev
      - libyaml-dev
sudo: false
# Only with gcc get the mingw-w64 cross compilers
before_install:
- if [ $CC = 'gcc' ] && [ $TRAVIS_OS_NAME = 'linux' ]; then gem install lcoveralls; fi
# Build and run tests. Only with gcc cross compile
script:
- ./misc/travis-check.sh
after_success:
- if [ $CC = 'gcc' ] && [ $TRAVIS_OS_NAME = 'linux' ]; then lcov -c -b . -d . -o coverage.info && lcoveralls --root . --retry-count 5 coverage.info; fi
# Build Matrix configuration
# By default Travis CI runs all possible environment/target combinations.
# This setting allows the exclusion of undesired combination.
# For more information:
# http://docs.travis-ci.com/user/customizing-the-build/#Build-Matrix
matrix:
exclude:
- compiler: clang
env: TARGET=Mingw32
- os: osx
env: TARGET=Mingw32
|
name: gls-args
commands:
- echo "+sdfverbose +overlap +multisource_int_delays +neg_tchk -negdelay" > args
- echo "-sdf max:Interconnect:inputs/design.sdf" >> args
- for ((w=0; w<$array_width; w++)); do
for ((h=1; h<=$array_height; h++)); do
TILE=$(printf "Tile_X%02X_Y%02X\n" $w $h);
if (($w%4 == 3)); then
TILETYPE="Tile_MemCore";
else
TILETYPE="Tile_PE";
fi;
echo "-sdf max:Interconnect.$TILE:inputs/$TILETYPE.sdf" >> args;
done;
done
- tr '\n' ' ' < args > outputs/design.args
outputs:
- design.args
parameters:
array_width: 4
array_height: 2
|
<filename>rtl_list.yml<gh_stars>0
#
# List of RTL sources. Contrarily to IPs, these reside in
# the current Git repository.
# Uses the YAML syntax.
# 'domain' refers to the two soc,cluster domains for FPGA
# emulator synthesis
#
tb:
path: tb
vip:
path: vip
pulpissimo:
path: pulpissimo
quicklogic:
path: quicklogic
ql_fcb:
path: ql_fcb
ql_math_unit:
path: ql_math_unit
|
verilog_sources:
sim_ctrl:
files: "sim_ctrl.sv"
fileset: "sim"
defines:
DT_MSDSL:
name: DT_MSDSL
value: 0.1e-6
SIMULATION_MODE_MSDSL:
name: SIMULATION_MODE_MSDSL
fileset: "sim"
|
# Copyright Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
- tool: vcs
compile_cmd:
- "vcs -file <cwd>/vcs.compile.option.f
-f <cwd>/files.f -full64
-l <out>/compile.log
-Mdir=<out>/vcs_simv.csrc
-o <out>/vcs_simv <cmp_opts>"
sim_cmd: >
<out>/vcs_simv +vcs+lic+wait <sim_opts> +ntb_random_seed=<seed>
- tool: ius
compile_cmd:
- "irun -64bit -access +rwc -f <cwd>/files.f
-q -sv -uvm -vlog_ext +.vh -I.
-uvmhome CDNS-1.2
-elaborate
-l <out>/compile.log <cmp_opts>"
sim_cmd: >
irun -R <sim_opts> -svseed <seed>
- tool: questa
compile_cmd:
- "vmap mtiUvm $QUESTA_HOME/questasim/uvm-1.2"
- "vlog -64
-access=rwc
-f <cwd>/files.f
-sv
-mfcu -cuname design_cuname
+define+UVM_REGEX_NO_DPI
-writetoplevels <out>/top.list
-l <out>/compile.log <cmp_opts>"
- "vopt -64 -debug
+designfile -f <out>/top.list
-l <out>/optimize.log <cmp_opts>
-o design_opt"
sim_cmd: >
vsim -64 -c -do <cwd>/questa_sim.tcl design_opt <sim_opts> -sv_seed <seed>
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: yolov2 detection on voc dataset.
input size: 448*448
float ops: 9.86G
task: detection
framework: darknet
prune: '0.71'
version: 2.0
files:
- name: dk_yolov2_voc_448_448_0.71_9.86G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=dk_yolov2_voc_448_448_0.71_9.86G_2.0.zip
checksum: a8d50b9c89a8aa1e7ac261fb0ec04ec4
- name: yolov2_voc_pruned_0_71
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov2_voc_pruned_0_71-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: 54791b2036161941da69c867ff4b9cd3
- name: yolov2_voc_pruned_0_71
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov2_voc_pruned_0_71-vck190-r2.0.0.tar.gz
checksum: bbeb3db4c475fc6d23a2266ceadd456d
- name: yolov2_voc_pruned_0_71
type: xmodel
board: vck50006pe-DPUCVDX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov2_voc_pruned_0_71-vck50006pe-DPUCVDX8H-DWC-r2.0.0.tar.gz
checksum: d523e231ed79e15e0508b9c83dd5a05c
- name: yolov2_voc_pruned_0_71
type: xmodel
board: vck50008pe-DPUCVDX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov2_voc_pruned_0_71-vck50008pe-DPUCVDX8H-r2.0.0.tar.gz
checksum: 3260cd4e06b7e846bf6f29d099652661
- name: yolov2_voc_pruned_0_71
type: xmodel
board: u50lv-DPUCAHX8H
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov2_voc_pruned_0_71-u50lv-DPUCAHX8H-r2.0.0.tar.gz
checksum: d8bb4567fc065e0b4fc2c1acdfeb8819
- name: yolov2_voc_pruned_0_71
type: xmodel
board: u50lv-DPUCAHX8H-DWC & u55c-DPUCAHX8H-DWC
download link: https://www.xilinx.com/bin/public/openDownload?filename=yolov2_voc_pruned_0_71-u55c-u50lv-DPUCAHX8H-DWC-r2.0.0.tar.gz
checksum: 1461c59c6a281a3f7a266f0b8c9d4e03
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
<filename>.github/workflows/python-static-analysis.yml
name: python static analysis
on:
push:
paths:
- '**.py'
- '.github/workflows/python-static-analysis.yml'
jobs:
analyze:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.6, 3.7, 3.8, 3.9]
steps:
- uses: actions/checkout@v2
- name: Update
run: sudo apt-get update -y
- name: Setup python${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install pylint flake8 bandit
- name: Pylint lint
run: |
pylint $(find ${{ github.workspace }} -name "*.py") \
-E -f parseable --disable=E0401 --ignore=__init__.py \
--ignore-patterns="test_.*.py" \
| tee ${{ github.workspace }}/pylint.log
- name: Flake8 lint
run: |
flake8 $(find ${{ github.workspace }} -name "*.py") \
--count --show-source --statistics \
| tee ${{ github.workspace }}/flake8.log
continue-on-error: true
- name: Bandit scan
run: |
bandit -r $(find ${{ github.workspace }} -name "*.py") \
--format txt \
| tee ${{ github.workspace }}/bandit.log
bandit -r $(find ${{ github.workspace }} -name "*.py") \
--format csv \
| tee ${{ github.workspace }}/bandit.log.csv
- name: Archive results
uses: actions/upload-artifact@v2
with:
name: static-analysis
path: |
${{ github.workspace }}/pylint.log
${{ github.workspace }}/flake8.log
${{ github.workspace }}/bandit.log
${{ github.workspace }}/bandit.log.csv
|
<gh_stars>100-1000
name: Update opae-sdk
on:
repository_dispatch:
types: update-opae-libs
jobs:
create_pr:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: '0'
- name: Update opae-libs subtree
run: |
git config user.name "OPAE Bot"
git config user.email <EMAIL>
git remote add opae-libs https://github.com/OPAE/opae-libs.git
git fetch --all
git subtree pull --prefix opae-libs opae-libs master --squash
- name: Create Pull Request
id: cpr
uses: peter-evans/create-pull-request@v3
with:
token: ${{ secrets.UPDATER_TOKEN }}
commit-message: Update opae-libs
committer: GitHub <<EMAIL>>
author: ${{ github.actor }} <${{ <EMAIL>>
signoff: false
title: 'ci: merge opae-libs'
body: |
Automated changes by [create-pull-request](https://github.com/peter-evans/create-pull-request) GitHub action.
Must be use a normal merge (not squash-merge and not rebase-merge)
branch: auto/update-libs
delete-branch: true
assignees: r-rojo
reviewers: |
r-rojo
asgardkm
tswhison
|
<gh_stars>10-100
name: CI
on:
push:
pull_request:
jobs:
build:
runs-on: ubuntu-20.04
steps:
- name: Checkout Lemoncore
uses: actions/checkout@v2
with:
submodules: recursive
- name: Update apt repository
run: sudo apt-get update -y
- name: Install packages
run: >-
sudo apt-get install
build-essential
flex
bison
libfl2
libfl-dev
gcc-riscv64-unknown-elf
yosys
libboost-all-dev
libeigen3-dev
libftdi-dev
- name: Install Verilator
run: .ci/install-verilator.sh
- name: Install Google Test
run: .ci/install-gtest.sh
- name: Install Icestorm
run: .ci/install-icestorm.sh
- name: Install NextPNR
run: .ci/install-nextpnr.sh
- name: Install Boolector
run: .ci/install-boolector.sh
- name: Install SymbiYosys
run: .ci/install-symbiyosys.sh
- name: Install bin2coe
run: pip3 install bin2coe
- name: Run tests
run: |
PATH="$HOME/.local/bin:$PATH"
cd formal; ./run.sh; cd ..;
make sim-core;
make test-core;
make test-soc;
make
|
# Built-in variables set by the hammer-vlsi system.
# These settings should not be overridden.
vlsi.builtins:
# Path to the hammer-vlsi directory.
hammer_vlsi_path: ""
|
<reponame>mfkiwl/garnet<filename>mflowgen/full_chip/tile_array/configure.yml
name: tile_array
commands:
- bash get_tile_array_outputs.sh
inputs:
- design.v
outputs:
- tile_array_tt.lib
- tile_array.lef
- tile_array.gds
- tile_array.vcs.v
- tile_array.sdf
- tile_array.lvs.v
- tile_array.sram.spi
- tile_array.sram.v
- tile_array.sram.pwr.v
- tile_array.sram_tt.db
postconditions:
- assert File( 'outputs/tile_array_tt.lib' ) # must exist
- assert File( 'outputs/tile_array.lef' ) # must exist
- assert File( 'outputs/tile_array.gds' ) # must exist
- assert File( 'outputs/tile_array.vcs.v' ) # must exist
- assert File( 'outputs/tile_array.sdf' ) # must exist
- assert File( 'outputs/tile_array.lvs.v' ) # must exist
- assert File( 'outputs/tile_array.sram.spi' ) # must exist
- assert File( 'outputs/tile_array.sram.v' ) # must exist
- assert File( 'outputs/tile_array.sram.pwr.v' ) # must exist
- assert File( 'outputs/tile_array.sram_tt.db' ) # must exist
|
<gh_stars>1-10
name: ci
on:
push:
branches:
- "**"
tags:
- "v*"
jobs:
# This job builds and updates the docker images on the LibSV's Docker Hub. It will create and push a
# new image if any of the following conditions apply:
# 1. This is a new commit to the 'main' branch.
# 2. If a tag for this branch does not already exist on LibSV's Docker Hub.
# 3. If the Dockerfile in the top project directory changed in the most recent commit.
docker:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 2
- name: Check if Dockerfile changed in last commit
run: |
echo "DOCKERFILE_CHANGED=$(git diff --name-only HEAD~1 HEAD | grep -c Dockerfile)" >> $GITHUB_ENV
- name: Docker meta
id: docker_meta
uses: docker/metadata-action@v3
with:
images: bensampson5/libsv
- name: Check if docker image already exists
run: |
echo '{"experimental": "enabled"}' > ~/.docker/config.json;
if [[ $(docker manifest inspect ${{ steps.docker_meta.outputs.tags }} 2>/dev/null) ]]; then
echo "DOCKER_IMAGE_EXISTS=1" >> $GITHUB_ENV;
else
echo "DOCKER_IMAGE_EXISTS=0" >> $GITHUB_ENV;
fi;
- name: Set up QEMU
if: env.DOCKERFILE_CHANGED == '1' || env.DOCKER_IMAGE_EXISTS == '0' || github.ref == 'refs/heads/main'
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
if: env.DOCKERFILE_CHANGED == '1' || env.DOCKER_IMAGE_EXISTS == '0' || github.ref == 'refs/heads/main'
uses: docker/setup-buildx-action@v1
- name: Cache Docker layers
if: env.DOCKERFILE_CHANGED == '1' || env.DOCKER_IMAGE_EXISTS == '0' || github.ref == 'refs/heads/main'
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to Docker Hub
if: github.event_name != 'pull_request' && (env.DOCKERFILE_CHANGED == '1' || env.DOCKER_IMAGE_EXISTS == '0' || github.ref == 'refs/heads/main')
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
if: env.DOCKERFILE_CHANGED == '1' || env.DOCKER_IMAGE_EXISTS == '0' || github.ref == 'refs/heads/main'
id: docker_build
uses: docker/build-push-action@v2
with:
context: .
file: ./Dockerfile
platforms: linux/amd64
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.docker_meta.outputs.tags }}
labels: ${{ steps.docker_meta.outputs.labels }}
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
- name: Image digest
if: env.DOCKERFILE_CHANGED == '1' || env.DOCKER_IMAGE_EXISTS == '0' || github.ref == 'refs/heads/main'
run: echo ${{ steps.docker_build.outputs.digest }}
# This job runs all the LibSV builds, tests, and checks
libsv:
needs: [docker]
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Docker meta
id: docker_meta
uses: docker/metadata-action@v3
with:
images: bensampson5/libsv
- name: Setup GitHub environment
run: |
echo "DOCKER_RUN=docker run --rm -v $(pwd):/code bensampson5/libsv:${{ steps.docker_meta.outputs.version }}" >> $GITHUB_ENV;
- name: Pull docker image
run: docker pull bensampson5/libsv:${{ steps.docker_meta.outputs.version }}
- name: Build and run tests
run: ${{ env.DOCKER_RUN }} /code/tools/precommit.py --test
- name: Check format
run: |
${{ env.DOCKER_RUN }} /code/tools/precommit.py --check-format
- name: Lint
run: |
${{ env.DOCKER_RUN }} /code/tools/precommit.py --lint
- name: Build docs
run: |
${{ env.DOCKER_RUN }} /code/tools/precommit.py --docs
- name: Build package
run: |
${{ env.DOCKER_RUN }} /bin/bash -c "cd /code && poetry build"
- name: Publish package
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.PYPI_TEST_API_TOKEN }}
repository_url: https://test.pypi.org/legacy/
|
<filename>Lucee/docker-compose.yml
version: "3.5"
services:
lucee-helloworld:
build:
context: ./lucee
dockerfile: Dockerfile
restart: always
container_name: helloworld-lucee
ports:
- "8888:8888" # for multiple instances change 32770 to another port number
|
<gh_stars>10-100
digital_ctrl_inputs:
t_lo:
abspath: 'tb_i.t_lo'
width: 32
t_hi:
abspath: 'tb_i.t_hi'
width: 32
digital_ctrl_outputs:
clk_i:
abspath: 'tb_i.clk_i' |
hwpe-color-converter:
vlog_opts: [
+nowarnSVCHK,
]
incdirs: [
rtl,
]
files: [
rtl/control/control.sv,
rtl/control/control_registers.sv,
rtl/rgb2ycbcr/rgb2ycbcr_package.sv,
rtl/rgb2ycbcr/rgb2ycbcr.sv,
rtl/rgb2ycbcr/hwpe_rgb2ycbcr.sv,
rtl/streamer/streamer.sv,
rtl/hwpe_color_converter.sv,
rtl/hwpe_color_converter_wrap.sv,
]
tb_hwpe-color-converter:
flags: [
skip_synthesis,
]
files: [
tb/rgb2ycbcr_tb.sv,
]
|
apiVersion: "v1"
kind: "Pod"
metadata:
name: "rss-site"
labels:
app: "web"
spec:
containers:
- name: "front-end"
image: "nginx"
ports:
- containerPort": "80"
- name: "rss-reader"
image: "nickchase/rss-php-nginx:v1"
ports:
- containerPort": "88"
|
<filename>ara/hardware/deps/tech_cells_generic/Bender.yml
package:
name: tech_cells_generic
description: "Technology-agnostic building blocks."
dependencies:
common_verification: { git: "https://github.com/pulp-platform/common_verification.git", version: 0.2.0 }
sources:
- target: rtl
files:
# level 0
- src/rtl/tc_sram.sv
- target: all(fpga, xilinx)
files:
- src/deprecated/cluster_clk_cells_xilinx.sv
- src/deprecated/pulp_clk_cells_xilinx.sv
- src/fpga/tc_clk_xilinx.sv
- src/fpga/tc_sram_xilinx.sv
- target: not(all(fpga, xilinx))
files:
# Level 0
- src/deprecated/cluster_clk_cells.sv
- src/deprecated/pulp_clk_cells.sv
- src/rtl/tc_clk.sv
- target: not(synthesis)
files:
- src/deprecated/cluster_pwr_cells.sv
- src/deprecated/generic_memory.sv
- src/deprecated/generic_rom.sv
- src/deprecated/pad_functional.sv
- src/deprecated/pulp_buffer.sv
- src/deprecated/pulp_pwr_cells.sv
- src/tc_pwr.sv
- target: test
files:
- test/tb_tc_sram.sv
- src/deprecated/pulp_clock_gating_async.sv
|
<filename>.gitlab-ci.yml
before_script:
- git submodule update --init --recursive
- export LIBRARY_PATH=$LIBRARY_PATH:$CI_PROJECT_DIR/tmp/lib
- export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$CI_PROJECT_DIR/tmp/lib
- export C_INCLUDE_PATH=$C_INCLUDE_PATH:$CI_PROJECT_DIR/tmp/include
- export CPLUS_INCLUDE_PATH=$CPLUS_INCLUDE_PATH:$CI_PROJECT_DIR/tmp/include
- export VERILATOR_ROOT=$CI_PROJECT_DIR/tmp/verilator-3.918/
variables:
GIT_SUBMODULE_STRATEGY: recursive
stages:
- build
- test
- deploy
build-ci:
stage: build
script:
- make build
- echo $VERILATOR_ROOT
- ci/make-tmp.sh
- ci/install-fesvr.sh
- ci/install-verilator.sh
- ci/build-riscv-tests.sh
artifacts:
paths:
- tmp/
test_alu:
stage: test
script:
- make build library=alu_lib
- make alu library=alu_lib
- vcover-10.6 report alu.ucdb
test_fifo:
stage: test
script:
- make build library=fifo_lib
- make fifo library=fifo_lib
- vcover-10.6 report fifo.ucdb
.test_scoreboard:
stage: test
script:
- make build library=scoreboard_lib
- make scoreboard library=scoreboard_lib
- vcover-10.6 report scoreboard.ucdb
test_store_queue:
stage: test
script:
- make build library=store_queue_lib
- make store_queue library=store_queue_lib
- vcover-10.6 report store_queue.ucdb
test_core_asm:
stage: test
script:
- make build library=core_lib
- make run-asm-tests library=core_lib
- vcover-10.6 report run-asm-tests.ucdb
dependencies:
- build-ci
test_core_asm_verilator:
stage: test
script:
- make run-asm-tests-verilator verilator=$CI_PROJECT_DIR/tmp/bin/verilator
dependencies:
- build-ci
# test with the randomized memory interfaces
.test_core_asm_rand:
stage: test
script:
- make build library=core_rand_lib
# same as above but pass the rand_mem_if flag
- make run-asm-tests library=core_rand_lib uvm-flags=+rand_mem_if
- vcover-10.6 report run-asm-rand-tests.ucdb
dependencies:
- build-ci
.test_failed_tests:
stage: test
script:
- make build library=failed_tests_lib
- make run-failed-tests library=failed_tests_lib
- vcover-10.6 report run-failed-tests.ucdb
.test_lsu:
stage: test
script:
- make build library=lsu_lib
- make lsu library=lsu_lib
- vcover-10.6 report lsu.ucdb
- vcover-10.6 report -html lsu.ucdb
artifacts:
paths:
- covhtmlreport
|
package:
name: event_unit_flex
sources:
include_dirs: ["."]
files:
- event_unit_core.sv
- event_unit_interface_mux.sv
- event_unit_top.sv
- soc_periph_fifo.sv
- interc_sw_evt_trig.sv
- hw_barrier_unit.sv
- hw_mutex_unit.sv
- hw_dispatch.sv
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
description: fadnet for disparity estimation on scene flow.
input size: 576*960
float ops: 359G
task: disparity estimation
framework: pytorch
prune: 'no'
version: 2.0
files:
- name: pt_fadnet_sceneflow_576_960_441G_2.0
type: float & quantized
board: GPU
download link: https://www.xilinx.com/bin/public/openDownload?filename=pt_fadnet_sceneflow_576_960_441G_2.0.zip
checksum: 9d9319941a6357177904bc94bea05c5a
- name: FADNet_0_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=FADNet_0_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: 8bf0bd7cfce4a0716b5bee8e38e6023a
- name: FADNet_1_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=FADNet_1_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: d6b68b7c5eeabcbb6b11ff5c7457eedf
- name: FADNet_2_pt
type: xmodel
board: zcu102 & zcu104 & kv260
download link: https://www.xilinx.com/bin/public/openDownload?filename=FADNet_2_pt-zcu102_zcu104_kv260-r2.0.0.tar.gz
checksum: d72e724730e2e1a3be799d06aecb5bf4
- name: FADNet_0_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=FADNet_0_pt-vck190-r2.0.0.tar.gz
checksum: 36eea2ee465e8a584c9be8c79dcbe770
- name: FADNet_1_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=FADNet_1_pt-vck190-r2.0.0.tar.gz
checksum: 964eb3313e8a2ef7205e9fe84b3cd7f6
- name: FADNet_2_pt
type: xmodel
board: vck190
download link: https://www.xilinx.com/bin/public/openDownload?filename=FADNet_2_pt-vck190-r2.0.0.tar.gz
checksum: 85ba61b4a9506ebd7d0d0067c36c0b31
license: https://github.com/Xilinx/Vitis-AI/blob/master/LICENSE
|
---
name: check release
on:
workflow_dispatch:
inputs:
tag:
description: Release tag
required: true
env:
CM3RELEASE: https://github.com/modula3/cm3/releases/download/${{ github.event.inputs.tag }}
jobs:
AMD64_DARWIN:
runs-on: macos-latest
env:
CM3DIST: cm3-dist-AMD64_LINUX-${{ github.event.inputs.tag }}.tar.xz
steps:
- run: |
mkdir ../build ../dist
curl --location --silent "${CM3RELEASE}/${CM3DIST}" | gtar Jxf - --directory=../dist --strip-components=1
cd ../build
../dist/scripts/concierge.py install --prefix ../install
AMD64_LINUX:
runs-on: ubuntu-latest
env:
CM3DIST: cm3-dist-AMD64_LINUX-${{ github.event.inputs.tag }}.tar.xz
steps:
- run: |
mkdir ../build ../dist
curl --location --silent "${CM3RELEASE}/${CM3DIST}" | tar Jxf - --directory=../dist --strip-components=1
cd ../build
../dist/scripts/concierge.py install --prefix ../install
AMD64_MINGW:
runs-on: windows-latest
env:
CM3DIST: cm3-dist-AMD64_NT-${{ github.event.inputs.tag }}.7z
steps:
- uses: msys2/setup-msys2@v2
with:
msystem: mingw64
install: >-
mingw-w64-x86_64-cmake
mingw-w64-x86_64-ninja
mingw-w64-x86_64-toolchain
p7zip
wget
- run: |
mkdir ../build
wget --quiet "${CM3RELEASE}/${CM3DIST}"
7z x -o.. "${CM3DIST}"
cd ../build
cmake -S ../cm3/bootstrap -B . -DCMAKE_INSTALL_PREFIX=../cm3/install -DCMAKE_LIBRARY_PATH=/mingw64/x86_64-w64-mingw32/lib
../cm3/scripts/concierge.py install --prefix ../install
shell: msys2 {0}
AMD64_NT:
runs-on: windows-latest
env:
CM3DIST: cm3-dist-AMD64_NT-${{ github.event.inputs.tag }}.7z
steps:
- run: choco install wget
- uses: egor-tensin/vs-shell@v2
with: {arch: x64}
- run: |
mkdir ..\build
wget --quiet "${{ env.CM3RELEASE }}/${{ env.CM3DIST }}"
7z x -o.. "${{ env.CM3DIST }}"
cd ..\build
python3 ..\cm3\scripts\concierge.py install --prefix ..\install
shell: cmd
I386_NT:
runs-on: windows-latest
env:
CM3DIST: cm3-dist-I386_NT-${{ github.event.inputs.tag }}.7z
steps:
- run: choco install wget
- uses: egor-tensin/vs-shell@v2
with: {arch: x86}
- run: |
mkdir ..\build
wget --quiet "${{ env.CM3RELEASE }}/${{ env.CM3DIST }}"
7z x -o.. "${{ env.CM3DIST }}"
cd ..\build
python3 ..\cm3\scripts\concierge.py install --prefix ..\install --target I386_NT
shell: cmd
|
<reponame>josnelihurt/de10nano-dev<filename>docker-compose.yml<gh_stars>0
version: "3.5"
services:
builder:
build:
context: .
target: builder_runner
network: host
image: de10-nano-builder:v1
container_name: de10nano-builder
ports:
- "4400:4000"
environment:
- PASSWORD=password
- TZ=America/Bogota
cap_add:
- SYS_PTRACE
volumes:
- /dev:/dev
- .config/.altera.quartus/:/home/builder/.altera.quartus/:rw
- ./workspace_persistent:/home/builder/external_workspace:rw
privileged: true
builder-micro-sd:
build:
context: .
target: builder_micro_sd
image: de10-nano-builder-micro-sd:v1
container_name: de10nano-builder-micro-sd
cap_add:
- SYS_PTRACE
volumes:
- /dev:/dev
- ./workspace_persistent:/output:rw
privileged: true
# builder-eds:
# build:
# context: eds-tools
# image: de10-nano-builder-eds:v1
# container_name: de10nano-builder-eds
# volumes:
# - ./workspace_persistent:/workspace_persistent:rw
# privileged: true |
<reponame>dpretet/bster
name: smoke_tests
on: [push]
jobs:
run-smoke-tests:
runs-on: ubuntu-latest
steps:
- run: sudo apt-get install iverilog
- run: cd ../../; ./flow.sh sim
name: Run core's unit test suite
shell: bash
|
language: rust
os: linux
dist: trusty
addons:
apt:
packages:
- libssl-dev
cache: cargo
rust:
- stable
- beta
- nightly
jobs:
allow_failures:
- rust: nightly
script:
- cargo clean
- cargo build --all
- cargo test --all
- (cd c-api/bindings/c_cpp/cpp_example && make)
after_success: |
if [[ "$TRAVIS_RUST_VERSION" == nightly ]]; then
cargo bench
fi
if [[ "$TRAVIS_RUST_VERSION" == stable ]]; then
bash <(curl https://raw.githubusercontent.com/xd009642/tarpaulin/master/travis-install.sh)
cargo tarpaulin --all --out Xml
bash <(curl -s https://codecov.io/bash)
fi
|
<filename>src_files.yml
per2axi:
files: [
src/per2axi_busy_unit.sv,
src/per2axi_req_channel.sv,
src/per2axi_res_channel.sv,
src/per2axi.sv,
]
|
name: vitis-ai-rnn
channels:
- pytorch
- conda-forge
- defaults
dependencies:
- python=3.6
- vai_c_rnn
- tf_nndct_lstm
- pytorch_nndct_lstm
- torchvision==0.5.0
|
#
# List of RTL sources. Contrarily to IPs, these reside in
# the current Git repository.
# Uses the YAML syntax.
#
rtl:
path: ..
domain: [cluster]
common_cells:
path: ../../common_cells
domain: [cluster]
|
<gh_stars>100-1000
# Copyright 2020 ETH Zurich and University of Bologna.
# Solderpad Hardware License, Version 0.51, see LICENSE for details.
# SPDX-License-Identifier: SHL-0.51
package:
name: snitch_dma
authors:
- <NAME> <<EMAIL>>
dependencies:
common_cells: {path: ../../vendor/pulp_platform_common_cells}
axi: {path: ../../vendor/pulp_platform_axi}
snitch: {path: ../../ip/snitch}
sources:
# Level 0:
- src/axi_dma_pkg.sv
# Level 1:
- src/axi_dma_error_handler.sv
- src/axi_dma_perf_counters.sv
- src/axi_dma_twod_ext.sv
# Level 2:
- src/axi_dma_tc_snitch_fe.sv
|
before_script:
- echo start testing $CI_PROJECT_NAME.
- git clone gits@xcdl190260:aisw/unilog
- git clone gits@xcdl190260:aisw/target_factory
- git clone gits@xcdl190260:aisw/xir
- git clone gits@xcdl190260:aisw/vart
build_on_host:
tags:
- aisw
script:
- source /opt/rh/devtoolset-9/enable
- (cd unilog; ./cmake.sh --clean)
- (cd target_factory; ./cmake.sh --clean)
- (cd xir; ./cmake.sh --clean --build-python)
- (cd vart; ./cmake.sh --clean --build-python --cmake-options=-DENABLE_CPU_RUNNER=off --cmake-options=-DENABLE_SIM_RUNNER=off --cmake-options=-DENABLE_DPU_RUNNER=on)
- ./cmake.sh --cmake-options=-DENABLE_OVERVIEW=on --clean
- cd sample_client && ./cmake.sh --clean
build_for_petalinux:
tags:
- aisw
script:
- source /var/lib/docker/scratch/gitlab-runner/build/sdk-0618/environment-setup-aarch64-xilinx-linux
- (cd unilog; ./cmake.sh --clean)
- (cd target_factory; ./cmake.sh --clean)
- (cd xir; ./cmake.sh --clean --build-python)
- (cd vart; ./cmake.sh --clean --build-python --cmake-options=-DENABLE_CPU_RUNNER=off --cmake-options=-DENABLE_SIM_RUNNER=off --cmake-options=-DENABLE_DPU_RUNNER=on)
- ./cmake.sh --cmake-options=-DENABLE_OVERVIEW=on --clean
- cd sample_client && ./cmake.sh --clean
|
<filename>src/test/resources/samples/langs/YAML/vcr_cassette.yml<gh_stars>1000+
---
http_interactions:
- request:
method: get
uri: http://example.com/
body: ''
headers: {}
response:
status:
code: 200
message: OK
headers:
Content-Type:
- text/html;charset=utf-8
Content-Length:
- '26'
body: This is the response body
http_version: '1.1'
recorded_at: Tue, 01 Nov 2011 04:58:44 GMT
recorded_with: VCR 2.0.0
|
name: documentation
on:
push:
branches: [main]
jobs:
build:
name: Sphinx Pages
runs-on: ubuntu-latest
steps:
- name: Checkout ποΈ
uses: actions/checkout@v2
- name: Setup and build π§
run: |
sudo apt-get install python3-pip
pip3 install sphinx sphinx-vhdl
sphinx-build -M html doc/source public
touch public/html/.nojekyll
- name: Deploy π
uses: JamesIves/github-pages-deploy-action@4.1.4
with:
branch: gh-pages
folder: public/html
|
package:
name: ne16
authors:
- "<NAME> <<EMAIL>>"
dependencies:
hwpe-stream: { git: "https://github.com/pulp-platform/hwpe-stream.git", version: 1.6 }
hci: { git: "https://github.com/pulp-platform/hci.git", version: 1.0.6 }
hwpe-ctrl: { git: "https://github.com/pulp-platform/hwpe-ctrl.git", version: 1.6 }
sources:
- rtl/ne16_package.sv
- rtl/accumulator/ne16_accumulator_scm_test_wrap.sv
- rtl/input_buffer/ne16_input_buffer_scm_test_wrap.sv
- rtl/accumulator/ne16_accumulator_scm.sv
- rtl/accumulator/ne16_accumulator_normquant.sv
- rtl/accumulator/ne16_normquant.sv
- rtl/accumulator/ne16_normquant_shifter.sv
- rtl/accumulator/ne16_normquant_bias.sv
- rtl/accumulator/ne16_normquant_multiplier.sv
- rtl/input_buffer/ne16_input_buffer_scm.sv
- rtl/input_buffer/ne16_input_buffer.sv
- rtl/array/ne16_scale.sv
- rtl/array/ne16_binconv_block.sv
- rtl/array/ne16_binconv_column.sv
- rtl/array/ne16_binconv_array.sv
- rtl/ctrl/ne16_ctrl_fsm.sv
- rtl/ctrl/ne16_ctrl.sv
- rtl/ne16_engine.sv
- rtl/ne16_streamer.sv
- rtl/ne16_top.sv
- rtl/ne16_top_wrap.sv |
# Python environment and dependencies to build the documentation
name: cocotb-docs
dependencies:
- python=3.8
- pip
# Packages installed from PyPI
- pip:
- -r requirements.txt
|
<reponame>flasonil/pulp_cluster
#
# List of IPs and relative branch/commit-hash/tag.
# Uses the YAML syntax.
#
axi/axi2mem:
commit: d704c8beff729476d1dbfd7b60679afa796947d2
domain: [soc, cluster]
group: pulp-platform
axi/axi2per:
commit: tags/v1.0.1
domain: [cluster]
group: pulp-platform
axi/per2axi:
commit: tags/v1.0.3
domain: [soc, cluster]
group: pulp-platform
axi/axi_size_conv:
commit: tags/pulp-v1.0
domain: [cluster]
group: pulp-platform
cluster_interconnect:
commit: tags/v1.0.3
domain: [cluster]
group: pulp-platform
event_unit_flex:
commit: tags/v1.0.1
domain: [cluster]
group: pulp-platform
mchan:
commit: tags/v1.1.0
domain: [cluster]
group: pulp-platform
hier-icache:
commit: tags/v1.0.1
domain: [cluster]
group: pulp-platform
icache-intc:
commit: tags/v1.0.1
domain: [cluster]
group: pulp-platform
icache_mp_128_pf:
commit: tags/v1.0.0
domain: [cluster]
group: pulp-platform
icache_private:
commit: b14de95b0f70e2edb54ccc7076d5737b41249411
domain: [cluster]
group: pulp-platform
cluster_peripherals:
commit: tags/v1.0.1
domain: [cluster]
group: pulp-platform
fpu_interco:
commit: tags/v1.2.4
domain: [soc, cluster]
group: pulp-platform
|
name: CI/VHDLTest
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: ghdl/setup-ghdl-ci@nightly
with:
backend: mcode
- name: Setup Python
uses: actions/setup-python@v2
- name: Install VUnit
run: python -m pip install VHDLTest
- name: VHDLTest Utility
run: python -m VHDLTest --config vhdltest.yaml
working-directory: fpga/common
|
title: AHB-Lite Multi-Layer Interconnect Switch
description: Parameterised AHB-Lite Multi-layer Interconnect Switch
#theme: jekyll-theme-dinky
show_downloads: true
show_license: true
license: Non-Commercial License
repository: RoaLogic/ahb3lite_interconnect
url: https://roalogic.github.io
baseurl: /ahb3lite_interconnect
include:
- _pages # Non-datasheet GH Pages
exclude:
- "*.sublime-project"
- "*.sublime-workspace"
- "*.graffle" # Original Omnigraffle sources
- "*.sh" # Ignore any scripts
- "/assets/img/*.pdf" # PDF images used for PDF Datasheet
- "/assets/img/*.eps" # EPS images used for PDF Datasheet
- markdown # Markdown Datasheet generation scripts
- pkg # PDF Datasheet layout definition
- tex # Original source texfiles
- Gemfile
- Gemfile.lock
defaults:
# _pages
- scope:
path : ""
type : pages
values:
layout : roalogic # Use custom layout for *all* pages
author : <NAME>
|
timer_unit:
incdirs: [
rtl,
]
files: [
./rtl/apb_timer_unit.sv,
./rtl/timer_unit.sv,
./rtl/timer_unit_counter.sv,
./rtl/timer_unit_counter_presc.sv,
]
|
<reponame>hito0512/Vitis-AI
name: vitis-ai-pytorch
channels:
- pytorch
- conda-forge
- anaconda
dependencies:
- python=3.6
- pytorch_nndct_cpu
- pyyaml
- jupyter
- ipywidgets
- dill
- progressbar2
- pytest
- scikit-learn
- pandas
- matplotlib
- vaic
|
<reponame>asb/opentitan
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
# An example custom configuration that generates lots of loops (100
# times as many as the default config), but constrains them not to
# take too long. We also force the generator not to make long loop
# tails (the straight-line code that appears up to and including the
# last instruction of the loop). The idea is that we'll be much more
# likely to get deeply nested loops this way.
inherit: base
gen-weights:
Loop: 100
ranges:
max-loop-iters: 2
max-loop-tail-insns: 4
|
---
title: Writing efficient code
author:
- Felix "xq" QueiΓner
date: April 28, 2020
abstract: The <i>SPU Mark II</i> allows one to write very compact and efficient code. In this document, you learn about different tricks on how to do this. |
udma_sdio:
files: [
rtl/sdio_crc7.sv,
rtl/sdio_crc16.sv,
rtl/sdio_txrx_cmd.sv,
rtl/sdio_txrx_data.sv,
rtl/sdio_txrx.sv,
rtl/udma_sdio_reg_if.sv,
rtl/udma_sdio_top.sv,
]
|
ePixHr10kT:
enable: True
ForceWrite: False
InitAfterConfig: False
EpixHR:
enable: True
MMCMRegisters:
enable: True
CLKOUT0PhaseMux: 0
CLKOUT0HighTime: 2
CLKOUT0LowTime: 2
CLKOUT0Frac: 4
CLKOUT0FracEn: 1
CLKOUT0Edge: 0
CLKOUT0NoCount: 0
CLKOUT0DelayTime: 0
CLKOUT1PhaseMux: 0
CLKOUT1HighTime: 10
CLKOUT1LowTime: 10
CLKOUT1Edge: 0
CLKOUT1NoCount: 0
CLKOUT1DelayTime: 0
CLKOUT2PhaseMux: 0
CLKOUT2HighTime: 5
CLKOUT2LowTime: 5
CLKOUT2Edge: 0
CLKOUT2NoCount: 0
CLKOUT2DelayTime: 0
CLKOUT3PhaseMux: 0
CLKOUT3HighTime: 1
CLKOUT3LowTime: 1
CLKOUT3Edge: 0
CLKOUT3NoCount: 1
CLKOUT3DelayTime: 0
CLKOUT4PhaseMux: 0
CLKOUT4HighTime: 1
CLKOUT4LowTime: 1
CLKOUT4Edge: 0
CLKOUT4NoCount: 1
CLKOUT4DelayTime: 0
CLKOUT5PhaseMux: 0
CLKOUT5HighTime: 1
CLKOUT5LowTime: 1
CLKOUT5Edge: 0
CLKOUT5NoCount: 1
CLKOUT5DelayTime: 0
CLKOUT6PhaseMux: 0
CLKOUT6HighTime: 1
CLKOUT6LowTime: 1
CLKOUT6Edge: 0
CLKOUT6NoCount: 1
CLKOUT6DelayTime: 0 |
---
name: fft
board: boards/red-pitaya
version: 0.1.1
cores:
- fpga/cores/axi_ctl_register_v1_0
- fpga/cores/axi_sts_register_v1_0
- fpga/cores/dna_reader_v1_0
- fpga/cores/latched_mux_v1_0
- fpga/cores/axis_constant_v1_0
- fpga/cores/redp_adc_v1_0
- fpga/cores/redp_dac_v1_0
- fpga/cores/psd_counter_v1_0
- fpga/cores/comparator_v1_0
- fpga/cores/saturation_v1_0
- fpga/cores/pdm_v1_0
- fpga/cores/at93c46d_spi_v1_0
- fpga/cores/bus_multiplexer_v1_0
memory:
- name: control
offset: '0x40000000'
range: 4K
- name: status
offset: '0x50000000'
range: 4K
- name: xadc
offset: '0x43C00000'
range: 64K
- name: demod
offset: '0x60000000'
range: 8K
- name: psd
offset: '0x70000000'
range: 8K
- name: adc_fifo
offset: '0x43C10000'
range: 32K
control_registers:
- led
- mmcm
- ctl_fft
- psd_valid
- psd_input_sel
- phase_incr[2]
- laser_current
- laser_control
- power_setpoint
- eeprom_ctl
status_registers:
- adc[n_adc]
- cycle_index
- eeprom_sts
- pid_control
parameters:
fclk0: 187500000
adc_clk: 125000000
dac_width: 14
adc_width: 14
pwm_width: 12
n_adc: 2
fft_size: 2048
n_cycles: 1023
cic_differential_delay: 1
cic_decimation_rate: 250
cic_n_stages: 6
xdc:
- boards/red-pitaya/config/ports.xdc
- boards/red-pitaya/config/clocks.xdc
- ./expansion_connector.xdc
drivers:
- server/drivers/common.hpp
- server/drivers/xadc.hpp
- server/drivers/laser.hpp
- server/drivers/eeprom.hpp
- ./drivers/fft.hpp
- ./drivers/demodulator.hpp
- ./drivers/redpitaya_adc_calibration.hpp
web:
- web/koheron.ts
- web/jquery.flot.d.ts
- web/laser.ts
- web/laser-control.html
- web/dds-frequency/dds-frequency.ts
- web/dds-frequency/dds-frequency.html
- ./web/app.ts
- ./web/fft.ts
- ./web/plot/plot.ts
- web/plot-basics/plot-basics.ts
- web/plot-basics/plot-basics.html
- ./web/plot/yunit.html
- ./web/plot/peak-detection.html
- ./web/fft/fft-app.ts
- ./web/fft/fft-window.html
- ./web/fft/input-channel.html
- ./web/export-file/export-file.html
- ./web/export-file/export-file.ts
- ./web/index.html
- web/main.css |
# This is an example of how the PWM IP is used in our yaml
PWM_OUT:
IP: __PWM_OUT
CMD: PWM
pha:
PIN: A34
DIRECTION: out
phb:
PIN: A33
DIRECTION: out
|
<filename>conda-recipe/meta.yaml
package:
name: lsst_pwr_ctrl_core
version: {{ GIT_DESCRIBE_TAG }}
source:
path: ..
build:
number: {{ GIT_DESCRIBE_NUMBER|int }}
requirements:
build:
- python
- rogue
- git
- gitpython
- numpy
run:
- python
- rogue
- numpy
about:
home: https://github.com/slaclab/lsst-pwr-ctrl-core
license: SLAC Open License
license_file: LICENSE.txt
summary: LSST Power Controller Core Library
|
os: linux
dist: bionic
language: generic
env:
jobs:
- LISP=sbcl
- LISP=sbcl32
- LISP=ccl
- LISP=ccl32
- LISP=ecl
- LISP=allegro
- LISP=cmucl
# - LISP=abcl
jobs:
allow_failures:
- env: LISP=abcl
- env: LISP=allegro
- env: LISP=ccl32
- env: LISP=cmucl
notifications:
email:
on_success: change
on_failure: always
irc:
channels:
- "chat.freenode.net#iolib"
on_success: change
on_failure: always
use_notice: true
skip_join: true
install:
- curl -L https://raw.githubusercontent.com/sionescu/cl-travis/master/install.sh | sh
- cl -e "(cl:in-package :cl-user)
(dolist (p '(:fiveam))
(ql:quickload p :verbose t))"
script:
- cl -e "(cl:in-package :cl-user)
(prin1 (lisp-implementation-type)) (terpri) (prin1 (lisp-implementation-version)) (terpri)
(ql:quickload :bordeaux-threads/test :verbose t)
(uiop:quit (if (some (lambda (x) (typep x '5am::test-failure))
(5am:run :bordeaux-threads))
1 0))"
|
<filename>.github/workflows/images.yml
name: 'images'
on:
push:
paths:
- '.github/workflows/images.yml'
- '.github/images.sh'
schedule:
- cron: '0 0 * * 5'
env:
DOCKER_BUILDKIT: '1'
jobs:
build:
strategy:
fail-fast: false
max-parallel: 2
matrix:
task: [
{ tag: llvm, pkg: llvm-7 },
{ tag: mcode, pkg: mcode },
]
runs-on: ubuntu-latest
env:
DOCKER_REGISTRY: docker.pkg.github.com
steps:
- uses: actions/checkout@v2
- name: Build image
env:
TAG: ${{ matrix.task.tag }}
PKG: ${{ matrix.task.pkg }}
run: |
./.github/images.sh
- name: Docker login
run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login -u vunit-gha --password-stdin "$DOCKER_REGISTRY"
- name: Docker push
if: github.repository == 'VUnit/vunit'
run: |
DIMG="vunit/dev:${{ matrix.task.tag }}"
GHIMG="${DOCKER_REGISTRY}/vunit/$DIMG"
docker tag "$DIMG" "$GHIMG"
docker push "$GHIMG"
- name: Docker logout
run: docker logout "$DOCKER_REGISTRY"
if: always()
|
<gh_stars>0
package:
name: ara
authors:
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
- "<NAME> <<EMAIL>>"
dependencies:
axi: { git: "https://github.com/pulp-platform/axi.git", version: 0.29.1 }
common_cells: { git: "https://github.com/pulp-platform/common_cells.git", version: 1.22.1 }
cva6: { git: "https://github.com/pulp-platform/cva6.git", rev: acc_port }
tech_cells_generic: { git: "https://github.com/pulp-platform/tech_cells_generic.git", version: 0.2.1 }
workspace:
checkout_dir: "hardware/deps"
export_include_dirs:
- hardware/include
sources:
files:
# Headers
- hardware/include/rvv_pkg.sv
- hardware/include/ara_pkg.sv
# Sources
# Level 1
- hardware/src/axi_to_mem.sv
- hardware/src/ctrl_registers.sv
- hardware/src/cva6_accel_first_pass_decoder.sv
- hardware/src/ara_dispatcher.sv
- hardware/src/ara_sequencer.sv
- hardware/src/axi_inval_filter.sv
- hardware/src/lane/lane_sequencer.sv
- hardware/src/lane/operand_queue.sv
- hardware/src/lane/operand_requester.sv
- hardware/src/lane/simd_alu.sv
- hardware/src/lane/simd_div.sv
- hardware/src/lane/simd_mul.sv
- hardware/src/lane/vector_regfile.sv
- hardware/src/masku/masku.sv
- hardware/src/sldu/sldu.sv
- hardware/src/vlsu/addrgen.sv
- hardware/src/vlsu/vldu.sv
- hardware/src/vlsu/vstu.sv
# Level 2
- hardware/src/lane/operand_queues_stage.sv
- hardware/src/lane/valu.sv
- hardware/src/lane/vmfpu.sv
- hardware/src/vlsu/vlsu.sv
# Level 3
- hardware/src/lane/vector_fus_stage.sv
# Level 4
- hardware/src/lane/lane.sv
# Level 5
- hardware/src/ara.sv
# Level 6
- hardware/src/ara_soc.sv
- target: ara_test
files:
# Level 1
- hardware/tb/ara_testharness.sv
# Level 2
- hardware/tb/ara_tb.sv
- target: verilator
files:
# Level 2
- hardware/tb/ara_tb_verilator.sv
|
<gh_stars>0
# Device description for the Agilent 33250A Function/Arbitrary Waveform Generator.
# set_ function expect a parameter, get_ function return a parameter.
# Just the very basic commands are implemented here.
identifier : Agilent Technologies,33250A
set_burst : BURST:STAT
get_burst : BURST:STAT?
set_n_bursts : BURS:NCYC
set_pulse_period : PULS:PER
set_pulse_width : PULS:WIDT
set_trigger_delay : TRIG:DELAY
get_voltage_high : VOLT:HIGH?
get_voltage_low : VOLT:LOW?
set_voltage_high : VOLT:HIGH
set_voltage_low : VOLT:LOW
|
<reponame>mithro/hgdb
name: Deployment Linux
on:
push:
tags:
- '*'
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Checkout submodules π₯οΈ
shell: bash
run: |
auth_header="$(git config --local --get http.https://github.com/.extraheader)"
git submodule sync --recursive
git -c "http.extraheader=$auth_header" -c protocol.version=2 submodule update --init --force --recursive --depth=1
- name: Build libhgdb π‘
shell: bash
run: bash scripts/deploy.sh
- name: Build Python hgdb π‘
shell: bash
run: bash bindings/python/scripts/deploy.sh
- name: Copy wheels to the same place π
shell: bash
run: sudo cp bindings/python/wheelhouse/* wheelhouse/
- name: Upload artifact π
uses: actions/upload-artifact@v2
with:
name: Python Wheels
path: wheelhouse
- name: Publish distribution π¦ to PyPI
uses: pypa/gh-action-pypi-publish@master
with:
user: keyi
password: ${{ secrets.<PASSWORD> }}
skip_existing: true
packages_dir: wheelhouse/ |
name: i2c_master_top
clock_port: wb_clk_i
verilog:
- i2c_master_bit_ctrl.v
- i2c_master_byte_ctrl.v
- i2c_master_defines.v
- i2c_master_top.v
- timescale.v
|
<gh_stars>10-100
before_install:
- sudo apt-get install scala verilator
language: scala
scala:
- 2.12.6
jdk:
- openjdk8
script:
- make
|
---
networkRows: 1
networkCols: 1
foxNetworkStages: 1
foxNodeFifos:
peToNetwork: 0
networkToPe: 0
resultNodeCoord:
x: 0
y: 0
resultNodeFifos:
peToNetwork: 0
networkToPe: 0
resultUartFifoDepth: 1024
romNodeCoord:
x: 0
y: 0
packetFormat:
multicastGroupBits: 1
multicastCoordBits: 1
readyFlagBits: 1
resultFlagBits: 1
matrixTypeBits: 1
matrixCoordBits: 8
matrixElementBits: 32
useMatrixInitFile: True
multicastAvailable: False
|
# TODO: Set number of vectors parameter in python file and testbench from yml
name: testbench
outputs:
- testbench.sv
- test_vectors.txt
- design.args
parameters:
testbench_name: GcdUnitTb
dut_name: GcdUnit_inst
commands:
- cp GcdUnitTb.sv outputs/testbench.sv
- python3 generate_test_vectors.py
- cp test_vectors.txt outputs/test_vectors.txt
|
<filename>hw/ip/otbn/dv/rig/rig/configs/straight-line.yml
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
# A custom configuration that doesn't generate any branches, and
# increases the weight for BadCallStackRW. The idea is that
# BadCallStackRW needs to know the depth of the call stack in order to
# under- or overflow it. Branches tend to make this less likely
# (because if the two sides of the branch use x1 differently, we'll
# end up with an unknown depth after the phi node), so turning them
# off makes it much more likely to work.
inherit: base
gen-weights:
Branch: 0
BadCallStackRW: 100
|
name: Export KiCAD Designs
on:
#push:
# branches: [ main ]
# paths: ['circuits/**']
#pull_request:
# branches: [ main ]
# paths: ['circuits/**']
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: nerdyscout/kicad-exports@v2.2
with:
config: docs.kibot.yaml
dir: docs
schema: '*.sch'
#board: '*.*pcb'
- name: upload results
uses: actions/upload-artifact@v2
with:
name: docs
path: docs
|