Merge pull request #133 from CachyOS/dash-pgo
add(dash): pgo
lseman authored Feb 24, 2024
2 parents 8115ed4 + 894ce85 commit b8b372c
Showing 3 changed files with 156 additions and 0 deletions.
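In short, the new PKGBUILD performs a two-pass profile-guided-optimization (PGO) build: dash is first compiled with -fprofile-generate, the bundled workload.dash script is run with that instrumented binary to record profile data, and the package is then rebuilt with -fprofile-use so GCC can optimize around the paths seen during training. The generic GCC flow looks like this (file and input names are illustrative, not part of the commit):

    gcc -O2 -fprofile-generate -o prog prog.c   # pass 1: instrumented build
    ./prog < training-input                     # run a representative workload
    gcc -O2 -fprofile-use -o prog prog.c        # pass 2: rebuild using the recorded profiles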
62 changes: 62 additions & 0 deletions dash/PKGBUILD
@@ -0,0 +1,62 @@
# Maintainer: Levente Polyak <anthraxx[at]archlinux[dot]org>
# Contributor: Dan McGee <[email protected]>

pkgname=dash
pkgver=0.5.12
pkgrel=2
pkgdesc='POSIX compliant shell that aims to be as small as possible'
url='http://gondor.apana.org.au/~herbert/dash/'
arch=('x86_64')
license=('BSD')
depends=('glibc' 'libedit' 'libedit.so')
install=dash.install
source=(https://git.kernel.org/pub/scm/utils/dash/dash.git/snapshot/${pkgname}-${pkgver}.tar.gz
        workload.dash)
sha512sums=('a5d2347465c0bad09a2921ecb55fb4e7fe451d627ed43c1da82b92259b539857c7a7f153dfee73cea4befcbb34388bb3585845148631262dfe6dec87390f049c'
            'SKIP')
b2sums=('e0751946fa3b6d513901cc98f3f39a48013f624b9a8ffd699e849456604b856866bd6da268560e6ffa9ec5b91976930d830297031804fdcbab82a953720ee973'
        'SKIP')

prepare() {
  cd ${pkgname}-${pkgver}
  autoreconf -fiv
}

build() {

  # Stage 1: instrumented build; running the resulting binary writes out profile data
  export CFLAGS="${CFLAGS} -fprofile-generate -fprofile-update=atomic -fprofile-partial-training"
  export CXXFLAGS="${CXXFLAGS} -fprofile-generate -fprofile-update=atomic -fprofile-partial-training"

  cd ${pkgname}-${pkgver}
  ./configure \
    --prefix=/usr \
    --bindir=/usr/bin \
    --mandir=/usr/share/man \
    --exec-prefix="" \
    --with-libedit
  make V=1

  # Train: run the workload with the instrumented dash to collect profile data
  cd ..
  ${pkgname}-${pkgver}/src/dash workload.dash

  # Stage 2: drop the instrumentation flags and rebuild using the collected profiles
  export CFLAGS="${CFLAGS//-fprofile-generate/}"
  export CFLAGS="${CFLAGS//-fprofile-update=atomic/}"
  export CXXFLAGS="${CXXFLAGS//-fprofile-generate/}"
  export CXXFLAGS="${CXXFLAGS//-fprofile-update=atomic/}"
  export CFLAGS="${CFLAGS} -fprofile-use"

  cd ${pkgname}-${pkgver}
  make clean
  ./configure \
    --prefix=/usr \
    --bindir=/usr/bin \
    --mandir=/usr/share/man \
    --exec-prefix="" \
    --with-libedit
  make V=1

}

package() {
  cd ${pkgname}-${pkgver}
  make DESTDIR="${pkgdir}" install
  install -Dm 644 COPYING -t "${pkgdir}/usr/share/licenses/${pkgname}"
}
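As a usage note, running makepkg from the dash/ directory performs both passes in one package build: the instrumented compile, the workload.dash training run, and the final rebuild with -fprofile-use.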
11 changes: 11 additions & 0 deletions dash/dash.install
@@ -0,0 +1,11 @@
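# pacman runs install scriptlets chrooted into the target root, so the
# relative path etc/shells refers to /etc/shells on the installed system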
post_install() {
  grep -q '/bin/dash' etc/shells || echo '/bin/dash' >> etc/shells
}

post_upgrade() {
  post_install
}

pre_remove() {
  sed -i '/^\/bin\/dash/d' etc/shells
}
83 changes: 83 additions & 0 deletions dash/workload.dash
@@ -0,0 +1,83 @@
#!/bin/dash
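# PGO training workload for dash: exercises recursion and shell arithmetic,
# external commands (expr, seq, sort, awk) and bulk file I/O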

# Increase complexity of mathematical computations
calculate_fibonacci() {
  n=$1
  if [ $n -le 1 ]; then
    echo $n
  else
    echo $(( $(calculate_fibonacci $((n - 1))) + $(calculate_fibonacci $((n - 2))) ))
  fi
}

# Calculating a larger set of prime numbers
calculate_primes() {
  n=$1
  count=0
  number=2
  primes=""
  while [ $count -lt $n ]; do
    is_prime=1
    for i in $(seq 2 $(expr $number / 2)); do
      if [ $(expr $number % $i) -eq 0 ]; then
        is_prime=0
        break
      fi
    done
    if [ $is_prime -eq 1 ]; then
      primes="$primes $number"
      count=$((count + 1))
    fi
    number=$((number + 1))
  done
  echo $primes
}

# Simulating heavier data processing
generate_large_data() {
  # dash has no $RANDOM, so draw pseudo-random values from a small linear
  # congruential generator implemented with shell arithmetic instead
  seed=12345
  for i in $(seq 1 100000); do
    seed=$(( (seed * 1103515245 + 12345) % 32768 ))
    echo $seed
  done > large_data.txt
}

sort_large_data() {
  sort -n large_data.txt -o sorted_large_data.txt
}

filter_large_data() {
  awk '{if ($1 % 2 == 0) print}' sorted_large_data.txt > filtered_large_data.txt
}

# More complex file I/O operations
complex_file_operations() {
  echo "Generating large files for I/O operations..."
  for i in $(seq 1 10); do
    for j in $(seq 1 10000); do
      echo "Line $j of file $i" >> "file_${i}.txt"
    done
  done

  echo "Reading from large files..."
  for i in $(seq 1 10); do
    while IFS= read -r line; do
      # discard each line; the point is to exercise the read built-in
      echo "$line" > /dev/null
    done < "file_${i}.txt"
  done
}

# Main execution starts here
echo "Calculating Fibonacci for 20..."
calculate_fibonacci 20

echo "Calculating first 100 prime numbers..."
calculate_primes 100

echo "Generating, sorting, and filtering large dataset..."
generate_large_data
sort_large_data
filter_large_data

echo "Performing complex file write and read operations..."
complex_file_operations

echo "All tasks completed."
