author     Adrian Szyndela <adrian.s@samsung.com>   2020-03-26 16:35:12 +0100
committer  Adrian Szyndela <adrian.s@samsung.com>   2020-03-26 16:35:12 +0100
commit     92c306c8e8329e9d68c52549d75e221fa08b61d9 (patch)
tree       a42d8a6284b6b6148b139de4be94fe23a3369033 /tools
parent     acdc1dd584c4c6cf32187adf95d43c4fe87b149f (diff)
parent     1742aae2aa8cd33897250d6fcfbe10928e43eb2f (diff)
Merge v240 into tizen
systemd 240
Diffstat (limited to 'tools')
 -rwxr-xr-x  tools/catalog-report.py        |  1
 -rwxr-xr-x  tools/check-directives.sh      | 21
 -rwxr-xr-x  tools/check-includes.pl        |  2
 -rwxr-xr-x  tools/coverity.sh              |  6
 -rw-r--r--  tools/gdb-sd_dump_hashmaps.py  | 20
 -rwxr-xr-x  tools/generate-gperfs.py       | 24
 -rwxr-xr-x  tools/make-directive-index.py  |  1
 -rwxr-xr-x  tools/make-index-md.sh         | 32
 -rwxr-xr-x  tools/make-man-index.py        |  1
 -rwxr-xr-x  tools/make-man-rules.py        |  3
 -rwxr-xr-x  tools/meson-build.sh           |  4
 -rwxr-xr-x  tools/meson-check-api-docs.sh  | 23
 -rw-r--r--  tools/meson-link-test.c        |  1
 -rwxr-xr-x  tools/oss-fuzz.sh              | 13
 -rwxr-xr-x  tools/xml_helper.py            |  3
15 files changed, 128 insertions(+), 27 deletions(-)
diff --git a/tools/catalog-report.py b/tools/catalog-report.py
index b65869d02b..ca1e13df9a 100755
--- a/tools/catalog-report.py
+++ b/tools/catalog-report.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python3
-# -*- Mode: python; coding: utf-8; indent-tabs-mode: nil -*- */
# SPDX-License-Identifier: MIT
#
# This file is distributed under the MIT license, see below.
diff --git a/tools/check-directives.sh b/tools/check-directives.sh
new file mode 100755
index 0000000000..e2fd38898f
--- /dev/null
+++ b/tools/check-directives.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+set -e
+
+function generate_directives() {
+ perl -aF'/[\s,]+/' -ne '
+ if (my ($s, $d) = ($F[0] =~ /^([^\s\.]+)\.([^\s\.]+)$/)) { $d{$s}{"$d="} = 1; }
+ END { while (my ($key, $value) = each %d) {
+ printf "[%s]\n%s\n", $key, join("\n", keys(%$value))
+ }}' "$1"
+}
+
+if [[ $(generate_directives src/network/networkd-network-gperf.gperf | wc -l) -ne $(wc -l <test/fuzz/fuzz-network-parser/directives.network) ]]; then
+ echo "Looks like test/fuzz/fuzz-network-parser/directives.network hasn't been updated"
+ exit 1
+fi
+
+if [[ $(generate_directives src/network/netdev/netdev-gperf.gperf | wc -l) -ne $(wc -l <test/fuzz/fuzz-netdev-parser/directives.netdev) ]]; then
+ echo "Looks like test/fuzz/fuzz-netdev-parser/directives.netdev hasn't been updated"
+ exit 1
+fi
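
The new check-directives.sh packs its extraction into a dense Perl one-liner: take the first comma/whitespace-separated field of each gperf line, and if it looks like "Section.Directive", group the directive (with a trailing "=") under its section. A rough Python sketch of the same grouping, run on an invented gperf fragment rather than the real networkd tables:

    #!/usr/bin/env python3
    # Rough Python rendering of the Perl one-liner in check-directives.sh,
    # applied to an invented gperf fragment (section/directive names are
    # hypothetical, not copied from networkd-network-gperf.gperf).
    import collections
    import re

    sample_gperf = """\
    Match.Name,      config_parse_match_strv, 0, 0
    Network.DHCP,    config_parse_dhcp,       0, 0
    Network.Address, config_parse_address,    0, 0
    """

    sections = collections.defaultdict(set)
    for line in sample_gperf.splitlines():
        first = re.split(r'[\s,]+', line.strip())[0]       # first field, split on spaces/commas
        m = re.match(r'^([^\s.]+)\.([^\s.]+)$', first)     # looks like "Section.Directive"?
        if m:
            sections[m.group(1)].add(m.group(2) + '=')

    for section, directives in sections.items():
        print('[{}]'.format(section))
        print('\n'.join(sorted(directives)))
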
diff --git a/tools/check-includes.pl b/tools/check-includes.pl
index 6aae7c1534..c8bfcba8c0 100755
--- a/tools/check-includes.pl
+++ b/tools/check-includes.pl
@@ -1,7 +1,7 @@
+# SPDX-License-Identifier: CC0-1.0
#!/usr/bin/env perl
#
# checkincludes: Find files included more than once in (other) files.
-# Copyright abandoned, 2000, Niels Kristian Bech Jensen <nkbj@image.dk>.
foreach $file (@ARGV) {
open(FILE, $file) or die "Cannot open $file: $!.\n";
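
check-includes.pl itself is only partially shown here; per its comment it flags headers included more than once in a file. A hedged approximation in Python (the Perl script's exact message format is not visible in this hunk, so the output line below is purely illustrative):

    #!/usr/bin/env python3
    # Approximation of the behaviour described by the comment above: report
    # headers that are #include'd more than once in the same source file.
    import collections
    import re
    import sys

    for path in sys.argv[1:]:
        counts = collections.Counter()
        with open(path) as f:
            for line in f:
                m = re.match(r'\s*#\s*include\s*[<"]([^">]+)[">]', line)
                if m:
                    counts[m.group(1)] += 1
        for header, n in counts.items():
            if n > 1:
                print('{}: {} included {} times'.format(path, header, n))
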
diff --git a/tools/coverity.sh b/tools/coverity.sh
index 561d127d1e..af4c920cd2 100755
--- a/tools/coverity.sh
+++ b/tools/coverity.sh
@@ -42,13 +42,13 @@ if [ "$AUTH_RES" = "Access denied" ]; then
echo -e "\033[33;1mCoverity Scan API access denied. Check COVERITY_SCAN_PROJECT_NAME and COVERITY_SCAN_TOKEN.\033[0m"
exit 1
else
- AUTH=`echo $AUTH_RES | python -c "import sys, json; print json.load(sys.stdin)['upload_permitted']"`
+ AUTH=`echo $AUTH_RES | python -c "import sys, json; print(json.load(sys.stdin)['upload_permitted'])"`
if [ "$AUTH" = "True" ]; then
echo -e "\033[33;1mCoverity Scan analysis authorized per quota.\033[0m"
else
- WHEN=`echo $AUTH_RES | python -c "import sys; json; print json.load(sys.stdin)['next_upload_permitted_at']"`
+ WHEN=`echo $AUTH_RES | python -c "import sys, json; print(json.load(sys.stdin)['next_upload_permitted_at'])"`
echo -e "\033[33;1mCoverity Scan analysis NOT authorized until $WHEN.\033[0m"
- exit 0
+ exit 1
fi
fi
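
Both patched snippets only pull one field out of the Coverity Scan auth reply; besides switching to Python 3 print(), the second one also fixes the "import sys; json" typo, and the quota-exceeded branch now exits non-zero. A self-contained sketch of that JSON handling, with a fabricated reply (the real response comes from scan.coverity.com and may carry more fields):

    #!/usr/bin/env python3
    # Sketch of the JSON handling done by the inline python snippets above.
    import json
    import sys

    auth_res = '{"upload_permitted": false, "next_upload_permitted_at": "2020-03-27T16:35:12+01:00"}'
    reply = json.loads(auth_res)

    if reply['upload_permitted']:
        print('Coverity Scan analysis authorized per quota.')
        sys.exit(0)

    print('Coverity Scan analysis NOT authorized until', reply['next_upload_permitted_at'])
    sys.exit(1)   # the hunk also changes this branch from "exit 0" to "exit 1"
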
diff --git a/tools/gdb-sd_dump_hashmaps.py b/tools/gdb-sd_dump_hashmaps.py
index e6ddd14ea7..4e8593f320 100644
--- a/tools/gdb-sd_dump_hashmaps.py
+++ b/tools/gdb-sd_dump_hashmaps.py
@@ -1,7 +1,8 @@
#!/usr/bin/env python3
-# -*- Mode: python; coding: utf-8; indent-tabs-mode: nil -*- */
# SPDX-License-Identifier: LGPL-2.1+
+from __future__ import print_function
+
import gdb
class sd_dump_hashmaps(gdb.Command):
@@ -14,12 +15,11 @@ class sd_dump_hashmaps(gdb.Command):
d = gdb.parse_and_eval("hashmap_debug_list")
all_entry_sizes = gdb.parse_and_eval("all_entry_sizes")
all_direct_buckets = gdb.parse_and_eval("all_direct_buckets")
- hashmap_base_t = gdb.lookup_type("HashmapBase")
uchar_t = gdb.lookup_type("unsigned char")
ulong_t = gdb.lookup_type("unsigned long")
debug_offset = gdb.parse_and_eval("(unsigned long)&((HashmapBase*)0)->debug")
- print "type, hash, indirect, entries, max_entries, buckets, creator"
+ print("type, hash, indirect, entries, max_entries, buckets, creator")
while d:
h = gdb.parse_and_eval("(HashmapBase*)((char*)%d - %d)" % (int(d.cast(ulong_t)), debug_offset))
@@ -34,7 +34,7 @@ class sd_dump_hashmaps(gdb.Command):
t = ["plain", "ordered", "set"][int(h["type"])]
- print "{}, {}, {}, {}, {}, {}, {} ({}:{})".format(t, h["hash_ops"], bool(h["has_indirect"]), n_entries, d["max_entries"], n_buckets, d["func"], d["file"], d["line"])
+ print("{}, {}, {}, {}, {}, {}, {} ({}:{})".format(t, h["hash_ops"], bool(h["has_indirect"]), n_entries, d["max_entries"], n_buckets, d["func"], d["file"], d["line"]))
if arg != "" and n_entries > 0:
dib_raw_addr = storage_ptr + (all_entry_sizes[h["type"]] * n_buckets)
@@ -46,10 +46,10 @@ class sd_dump_hashmaps(gdb.Command):
for dib in sorted(iter(histogram)):
if dib != 255:
- print "{:>3} {:>8} {} of entries".format(dib, histogram[dib], 100.0*histogram[dib]/n_entries)
+ print("{:>3} {:>8} {} of entries".format(dib, histogram[dib], 100.0*histogram[dib]/n_entries))
else:
- print "{:>3} {:>8} {} of slots".format(dib, histogram[dib], 100.0*histogram[dib]/n_buckets)
- print "mean DIB of entries: {}".format(sum([dib*histogram[dib] for dib in iter(histogram) if dib != 255])*1.0/n_entries)
+ print("{:>3} {:>8} {} of slots".format(dib, histogram[dib], 100.0*histogram[dib]/n_buckets))
+ print("mean DIB of entries: {}".format(sum([dib*histogram[dib] for dib in iter(histogram) if dib != 255])*1.0/n_entries))
blocks = []
current_len = 1
@@ -70,9 +70,9 @@ class sd_dump_hashmaps(gdb.Command):
if len(blocks) > 1 and blocks[0][0] == blocks[0][1] and blocks[-1][0] == n_buckets - 1:
blocks[0][1] += blocks[-1][1]
blocks = blocks[0:-1]
- print "max block: {}".format(max(blocks, key=lambda a: a[1]))
- print "sum block lens: {}".format(sum(b[1] for b in blocks))
- print "mean block len: {}".format((1.0 * sum(b[1] for b in blocks) / len(blocks)))
+ print("max block: {}".format(max(blocks, key=lambda a: a[1])))
+ print("sum block lens: {}".format(sum(b[1] for b in blocks)))
+ print("mean block len: {}".format((1.0 * sum(b[1] for b in blocks) / len(blocks))))
d = d["debug_list_next"]
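
Apart from the Python 3 print() conversion, the statistics themselves are unchanged; the "mean DIB of entries" figure is simply the histogram average over occupied buckets. A standalone sketch with invented histogram values (255 is the marker the script treats as a slot without an entry, hence its separate "of slots" branch):

    #!/usr/bin/env python3
    # Standalone sketch of the "mean DIB of entries" statistic printed above:
    # average the DIB (distance from initial bucket) over occupied buckets,
    # skipping the 255 marker. The histogram values are invented.
    histogram = {0: 40, 1: 12, 2: 3, 255: 45}   # dib -> number of buckets

    n_entries = sum(c for dib, c in histogram.items() if dib != 255)
    mean_dib = sum(dib * c for dib, c in histogram.items() if dib != 255) / n_entries
    print("mean DIB of entries: {}".format(mean_dib))
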
diff --git a/tools/generate-gperfs.py b/tools/generate-gperfs.py
new file mode 100755
index 0000000000..5392df0ebb
--- /dev/null
+++ b/tools/generate-gperfs.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: LGPL-2.1+
+
+"""
+Generate %-from-name.gperf from %-list.txt
+"""
+
+import sys
+
+name, prefix, input = sys.argv[1:]
+
+print("""\
+%{
+#if __GNUC__ >= 7
+_Pragma("GCC diagnostic ignored \\"-Wimplicit-fallthrough\\"")
+#endif
+%}""")
+print("""\
+struct {}_name {{ const char* name; int id; }};
+%null-strings
+%%""".format(name))
+
+for line in open(input):
+ print("{0}, {1}{0}".format(line.rstrip(), prefix))
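
The new generate-gperfs.py just turns a list of names into a gperf keyword table mapping each string to "<prefix><name>". A worked example with an invented name, prefix, and list contents (the real invocations and *-list.txt inputs come from the meson build and are not part of this diff):

    #!/usr/bin/env python3
    # Worked example of the mapping generate-gperfs.py emits; the %{ ... %}
    # GCC pragma preamble printed by the script is omitted here.
    name, prefix = "color", "COLOR_"
    lines = ["red", "green"]                  # stand-in for the *-list.txt contents

    print("struct {}_name {{ const char* name; int id; }};".format(name))
    print("%null-strings")
    print("%%")
    for line in lines:
        # each entry maps the string to "<prefix><entry>", e.g. "red, COLOR_red"
        print("{0}, {1}{0}".format(line.rstrip(), prefix))
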
diff --git a/tools/make-directive-index.py b/tools/make-directive-index.py
index 8b85ef40f3..8703c8a37b 100755
--- a/tools/make-directive-index.py
+++ b/tools/make-directive-index.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python3
-# -*- Mode: python; coding: utf-8; indent-tabs-mode: nil -*- */
# SPDX-License-Identifier: LGPL-2.1+
import sys
diff --git a/tools/make-index-md.sh b/tools/make-index-md.sh
new file mode 100755
index 0000000000..78506cbf39
--- /dev/null
+++ b/tools/make-index-md.sh
@@ -0,0 +1,32 @@
+#!/bin/sh
+
+set -eu
+
+cd "$@"/docs/
+(
+ echo -e "# systemd Documentation\n"
+
+ for f in *.md ; do
+ if [ "x$f" != "xindex.md" ] ; then
+ t=`grep "^# " "$f" | head -n 1 | sed -e 's/^#\s*//'`
+
+ if [ "x$f" = "xCODE_OF_CONDUCT.md" -o "x$f" = "xCONTRIBUTING.md" ] ; then
+ # For some reason GitHub refuses to generate
+ # HTML versions of these two documents,
+ # probably because they are in some way special
+ # in GitHub behaviour (as they are shown as
+ # links in the issue submission form). Let's
+ # work around this limitation by linking to
+ # their repository browser version
+ # instead. This might not even be such a bad
+ # thing, given that the issue submission form
+ # and our index file thus link to the same
+ # version.
+ u="https://github.com/systemd/systemd/blob/master/docs/$f"
+ else
+ u="https://systemd.io/"`echo "$f" | sed -e 's/.md$//'`
+ fi
+ echo "* [$t]($u)"
+ fi
+ done
+) > index.md
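
Per docs/*.md file, make-index-md.sh takes the first "# " heading as the title and derives a link target from the file name, special-casing the two files GitHub does not render as standalone pages. A minimal Python sketch of that per-file step, with an invented file name and heading:

    #!/usr/bin/env python3
    # Sketch of the per-file step in make-index-md.sh; file name and heading
    # below are made up for illustration.
    import re

    fname = "BOOT_STUFF.md"                       # hypothetical docs/ page
    first_heading = "# Notes on Boot Stuff"       # first "# " line of that page

    title = re.sub(r'^#\s*', '', first_heading)
    if fname in ("CODE_OF_CONDUCT.md", "CONTRIBUTING.md"):
        # GitHub does not generate HTML versions of these two documents,
        # so the script links to the repository browser instead.
        url = "https://github.com/systemd/systemd/blob/master/docs/" + fname
    else:
        url = "https://systemd.io/" + fname[:-len(".md")]
    print("* [{}]({})".format(title, url))
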
diff --git a/tools/make-man-index.py b/tools/make-man-index.py
index 7ed98cb4e0..66027af02e 100755
--- a/tools/make-man-index.py
+++ b/tools/make-man-index.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python3
-# -*- Mode: python; coding: utf-8; indent-tabs-mode: nil -*- */
# SPDX-License-Identifier: LGPL-2.1+
import collections
diff --git a/tools/make-man-rules.py b/tools/make-man-rules.py
index 42a48bc98c..c4551c6f61 100755
--- a/tools/make-man-rules.py
+++ b/tools/make-man-rules.py
@@ -1,6 +1,5 @@
#!/usr/bin/env python3
-# -*- Mode: python; coding: utf-8; indent-tabs-mode: nil -*- */
-# SPDX-License-Identifier: LGPL-2.1+
+# SPDX-License-Identifier: LGPL-2.1+
from __future__ import print_function
import collections
diff --git a/tools/meson-build.sh b/tools/meson-build.sh
index 304a755676..dea554177d 100755
--- a/tools/meson-build.sh
+++ b/tools/meson-build.sh
@@ -5,8 +5,10 @@ src="$1"
dst="$2"
target="$3"
options="$4"
+CC="$5"
+CXX="$6"
-[ -d "$dst" ] || meson "$src" "$dst" $options
+[ -f "$dst/ninja.build" ] || CC="$CC" CXX="$CXX" meson "$src" "$dst" $options
# Locate ninja binary, on CentOS 7 it is called ninja-build, so
# use that name if available.
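
The comment above refers to the ninja lookup that follows later in the script (not shown in this hunk): use the CentOS 7 name ninja-build when it exists, otherwise plain ninja. A hedged Python illustration of that fallback, with the preference order inferred from the comment only:

    #!/usr/bin/env python3
    # Illustration of the "locate ninja" step; the script itself does this in
    # shell, and the exact lookup is outside the context shown here.
    import shutil
    import sys

    ninja = shutil.which('ninja-build') or shutil.which('ninja')
    if ninja is None:
        sys.exit('ninja is not installed')
    print('using', ninja)
    # the script then runs the equivalent of: $ninja -C "$dst" "$target"
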
diff --git a/tools/meson-check-api-docs.sh b/tools/meson-check-api-docs.sh
index 5bc808c1e4..a654368f9e 100755
--- a/tools/meson-check-api-docs.sh
+++ b/tools/meson-check-api-docs.sh
@@ -2,10 +2,33 @@
set -eu
+sd_good=0
+sd_total=0
+udev_good=0
+udev_total=0
+
for symbol in `nm -g --defined-only "$@" | grep " T " | cut -d" " -f3 | sort -u` ; do
if test -f ${MESON_BUILD_ROOT}/man/$symbol.3 ; then
echo "✓ Symbol $symbol() is documented."
+ good=1
else
printf " \x1b[1;31mSymbol $symbol() lacks documentation.\x1b[0m\n"
+ good=0
fi
+
+ case $symbol in
+ sd_*)
+ ((sd_good+=good))
+ ((sd_total+=1))
+ ;;
+ udev_*)
+ ((udev_good+=good))
+ ((udev_total+=1))
+ ;;
+ *)
+ echo 'unknown symbol prefix'
+ exit 1
+ esac
done
+
+echo "libsystemd: $sd_good/$sd_total libudev: $udev_good/$udev_total"
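
The additions above tally, per prefix, how many exported symbols have a matching man/<symbol>.3 page. A sketch of that accounting with invented symbol names and an invented "documented" set:

    #!/usr/bin/env python3
    # Sketch of the per-prefix accounting added to meson-check-api-docs.sh:
    # bucket each symbol as sd_* or udev_*, and count it as "good" only when a
    # man page exists for it. Inputs here are invented.
    import sys

    symbols = ['sd_bus_open', 'sd_event_new', 'udev_new']
    documented = {'sd_bus_open', 'udev_new'}      # stand-in for "man/$symbol.3 exists"

    counts = {'sd': [0, 0], 'udev': [0, 0]}       # prefix -> [good, total]
    for symbol in symbols:
        if symbol.startswith('sd_'):
            prefix = 'sd'
        elif symbol.startswith('udev_'):
            prefix = 'udev'
        else:
            sys.exit('unknown symbol prefix')
        counts[prefix][1] += 1
        counts[prefix][0] += symbol in documented

    print('libsystemd: {}/{} libudev: {}/{}'.format(*counts['sd'], *counts['udev']))
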
diff --git a/tools/meson-link-test.c b/tools/meson-link-test.c
deleted file mode 100644
index 825bbff05f..0000000000
--- a/tools/meson-link-test.c
+++ /dev/null
@@ -1 +0,0 @@
-int main(void) {return 0;}
diff --git a/tools/oss-fuzz.sh b/tools/oss-fuzz.sh
index 200407fcca..9a116be114 100755
--- a/tools/oss-fuzz.sh
+++ b/tools/oss-fuzz.sh
@@ -35,8 +35,10 @@ fi
meson $build -D$fuzzflag -Db_lundef=false
ninja -C $build fuzzers
-for d in "$(dirname "$0")/../test/fuzz-corpus/"*; do
- zip -jqr $OUT/fuzz-$(basename "$d")_seed_corpus.zip "$d"
+# The seed corpus is a separate flat archive for each fuzzer,
+# with a fixed name ${fuzzer}_seed_corpus.zip.
+for d in "$(dirname "$0")/../test/fuzz/fuzz-"*; do
+ zip -jqr $OUT/$(basename "$d")_seed_corpus.zip "$d"
done
# get fuzz-dns-packet corpus
@@ -44,8 +46,11 @@ df=$build/dns-fuzzing
git clone --depth 1 https://github.com/CZ-NIC/dns-fuzzing $df
zip -jqr $OUT/fuzz-dns-packet_seed_corpus.zip $df/packet
-# install the private shared library without executable permissions
-install -Dt $OUT/src/shared/ -m 0644 $build/src/shared/libsystemd-shared-*.so
+install -Dt $OUT/src/shared/ $build/src/shared/libsystemd-shared-*.so
+
+wget -O $OUT/fuzz-json_seed_corpus.zip https://storage.googleapis.com/skia-fuzzer/oss-fuzz/skjson_seed_corpus.zip
+wget -O $OUT/fuzz-json.dict https://raw.githubusercontent.com/rc0r/afl-fuzz/master/dictionaries/json.dict
find $build -maxdepth 1 -type f -executable -name "fuzz-*" -exec mv {} $OUT \;
+find src -type f -name "fuzz-*.dict" -exec cp {} $OUT \;
cp src/fuzz/*.options $OUT
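
The comment added above spells out the convention: each directory under test/fuzz/fuzz-* becomes a flat $OUT/<fuzzer>_seed_corpus.zip that the fuzzing infrastructure picks up by name. A Python sketch of the same packing (paths follow the script; running from the repository root is assumed):

    #!/usr/bin/env python3
    # Sketch of the seed-corpus packing loop in oss-fuzz.sh: every
    # test/fuzz/fuzz-* directory is zipped flat (like zip -j) into
    # $OUT/<fuzzer>_seed_corpus.zip.
    import glob
    import os
    import zipfile

    out = os.environ.get('OUT', 'out')
    for corpus_dir in glob.glob('test/fuzz/fuzz-*'):
        if not os.path.isdir(corpus_dir):
            continue
        fuzzer = os.path.basename(corpus_dir)
        archive = os.path.join(out, fuzzer + '_seed_corpus.zip')
        with zipfile.ZipFile(archive, 'w') as zf:
            for name in os.listdir(corpus_dir):
                path = os.path.join(corpus_dir, name)
                if os.path.isfile(path):
                    # store entries without their directory prefix, as zip -j does
                    zf.write(path, arcname=name)
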
diff --git a/tools/xml_helper.py b/tools/xml_helper.py
index 19e343b4e3..f399e7493c 100755
--- a/tools/xml_helper.py
+++ b/tools/xml_helper.py
@@ -1,6 +1,5 @@
#!/usr/bin/env python3
-# -*- Mode: python; coding: utf-8; indent-tabs-mode: nil -*- */
-# SPDX-License-Identifier: LGPL-2.1+
+# SPDX-License-Identifier: LGPL-2.1+
from lxml import etree as tree