Compare commits

No commits in common. 'dev' and 'v0.9.3' have entirely different histories.
dev...v0.9.3

@@ -54,8 +54,3 @@ PointerAlignment: Left
# when wrapping function calls/declarations, force each parameter to have its own line
BinPackParameters: 'false'
BinPackArguments: 'false'
# TODO: uncomment me when we are ready to rearrange the header includes
# IncludeBlocks: Regroup
# IncludeCategories: 'llarp/'

@@ -1,2 +1,2 @@
HeaderFilterRegex: 'llarp/.*'
Checks: 'readability-else-after-return,clang-analyzer-core-*,modernize-*,-modernize-use-trailing-return-type,-modernize-use-nodiscard,bugprone-*,-bugprone-easily-swappable-parameters'
Checks: 'readability-else-after-return,clang-analyzer-core-*,modernize-*,-modernize-use-trailing-return-type,-modernize-use-nodiscard,bugprone-*'
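Both variants keep `HeaderFilterRegex` scoped to `llarp/`, so the configured checks can be run against a single translation unit through the compile database that `CMAKE_EXPORT_COMPILE_COMMANDS` (set in CMakeLists.txt further down) generates; a minimal sketch, with the build directory and source path as placeholders:

```
# Point clang-tidy at the compile database produced in ./build; it picks up
# Checks and HeaderFilterRegex from the .clang-tidy at the repo root.
clang-tidy -p build llarp/some_file.cpp
```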

@@ -1,450 +1,293 @@
local default_deps_base = [
'libsystemd-dev',
'python3-dev',
'libuv1-dev',
'libunbound-dev',
'nettle-dev',
'libssl-dev',
'libevent-dev',
'libsqlite3-dev',
'libcurl4-openssl-dev',
'libzmq3-dev',
'make',
];
local default_deps_nocxx = ['libsodium-dev'] + default_deps_base; // libsodium-dev needs to be >= 1.0.18
local default_deps = ['g++'] + default_deps_nocxx;
local default_deps_base='libsystemd-dev python3-dev libuv1-dev libunbound-dev nettle-dev libssl-dev libevent-dev libsqlite3-dev libcurl4-openssl-dev';
local default_deps_nocxx='libsodium-dev ' + default_deps_base; // libsodium-dev needs to be >= 1.0.18
local default_deps='g++ ' + default_deps_nocxx; // g++ sometimes needs replacement
local default_windows_deps='mingw-w64 zip nsis';
local docker_base = 'registry.oxen.rocks/lokinet-ci-';
local submodule_commands = [
'git fetch --tags',
'git submodule update --init --recursive --depth=1 --jobs=4',
];
local submodules = {
name: 'submodules',
image: 'drone/git',
commands: submodule_commands,
name: 'submodules',
image: 'drone/git',
commands: ['git fetch --tags', 'git submodule update --init --recursive --depth=1']
};
// cmake options for static deps mirror
local ci_dep_mirror(want_mirror) = (if want_mirror then ' -DLOCAL_MIRROR=https://oxen.rocks/deps ' else '');
local apt_get_quiet = 'apt-get -o=Dpkg::Use-Pty=0 -q';
// Regular build on a debian-like system:
local debian_pipeline(name,
image,
arch='amd64',
deps=default_deps,
build_type='Release',
lto=false,
werror=true,
cmake_extra='',
local_mirror=true,
extra_cmds=[],
jobs=6,
tests=true,
oxen_repo=false,
allow_fail=false) = {
kind: 'pipeline',
type: 'docker',
name: name,
platform: { arch: arch },
trigger: { branch: { exclude: ['debian/*', 'ubuntu/*'] } },
steps: [
submodules,
{
name: 'build',
image: image,
pull: 'always',
[if allow_fail then 'failure']: 'ignore',
environment: { SSH_KEY: { from_secret: 'SSH_KEY' } },
commands: [
'echo "Building on ${DRONE_STAGE_MACHINE}"',
'echo "man-db man-db/auto-update boolean false" | debconf-set-selections',
apt_get_quiet + ' update',
apt_get_quiet + ' install -y eatmydata',
] + (
if oxen_repo then [
'eatmydata ' + apt_get_quiet + ' install --no-install-recommends -y lsb-release',
'cp contrib/deb.oxen.io.gpg /etc/apt/trusted.gpg.d',
'echo deb http://deb.oxen.io $$(lsb_release -sc) main >/etc/apt/sources.list.d/oxen.list',
'eatmydata ' + apt_get_quiet + ' update',
] else []
local debian_pipeline(name, image,
arch='amd64',
deps=default_deps,
build_type='Release',
lto=false,
werror=true,
cmake_extra='',
extra_cmds=[],
jobs=6,
loki_repo=false,
allow_fail=false) = {
kind: 'pipeline',
type: 'docker',
name: name,
platform: { arch: arch },
trigger: { branch: { exclude: ['debian/*', 'ubuntu/*'] } },
steps: [
submodules,
{
name: 'build',
image: image,
[if allow_fail then "failure"]: "ignore",
environment: { SSH_KEY: { from_secret: "SSH_KEY" } },
commands: [
'echo "Building on ${DRONE_STAGE_MACHINE}"',
'echo "man-db man-db/auto-update boolean false" | debconf-set-selections',
apt_get_quiet + ' update',
apt_get_quiet + ' install -y eatmydata',
] + (if loki_repo then [
'eatmydata ' + apt_get_quiet + ' install -y lsb-release',
'cp contrib/deb.loki.network.gpg /etc/apt/trusted.gpg.d',
'echo deb http://deb.loki.network $$(lsb_release -sc) main >/etc/apt/sources.list.d/loki.network.list',
'eatmydata ' + apt_get_quiet + ' update'
] else []
) + [
'eatmydata ' + apt_get_quiet + ' dist-upgrade -y',
'eatmydata ' + apt_get_quiet + ' install --no-install-recommends -y gdb cmake git pkg-config ccache ' + std.join(' ', deps),
'mkdir build',
'cd build',
'cmake .. -DWITH_SETCAP=OFF -DCMAKE_CXX_FLAGS=-fdiagnostics-color=always -DCMAKE_BUILD_TYPE=' + build_type + ' ' +
(if build_type == 'Debug' then ' -DWARN_DEPRECATED=OFF ' else '') +
(if werror then '-DWARNINGS_AS_ERRORS=ON ' else '') +
'-DWITH_LTO=' + (if lto then 'ON ' else 'OFF ') +
'-DWITH_TESTS=' + (if tests then 'ON ' else 'OFF ') +
cmake_extra +
ci_dep_mirror(local_mirror),
'VERBOSE=1 make -j' + jobs,
'cd ..',
]
+ (if tests then ['./contrib/ci/drone-gdb.sh ./build/test/testAll --use-colour yes'] else [])
+ extra_cmds,
},
],
'eatmydata ' + apt_get_quiet + ' dist-upgrade -y',
'eatmydata ' + apt_get_quiet + ' install -y gdb cmake git ninja-build pkg-config ccache ' + deps,
'mkdir build',
'cd build',
'cmake .. -G Ninja -DWITH_SETCAP=OFF -DCMAKE_CXX_FLAGS=-fdiagnostics-color=always -DCMAKE_BUILD_TYPE='+build_type+' ' +
(if werror then '-DWARNINGS_AS_ERRORS=ON ' else '') +
'-DWITH_LTO=' + (if lto then 'ON ' else 'OFF ') +
cmake_extra,
'ninja -j' + jobs + ' -v',
'../contrib/ci/drone-gdb.sh ./test/testAll --use-colour yes',
] + extra_cmds,
}
],
};
local apk_builder(name, image, extra_cmds=[], allow_fail=false, jobs=6) = {
kind: 'pipeline',
type: 'docker',
name: name,
platform: { arch: 'amd64' },
trigger: { branch: { exclude: ['debian/*', 'ubuntu/*'] } },
steps: [
submodules,
{
name: 'build',
image: image,
pull: 'always',
[if allow_fail then 'failure']: 'ignore',
environment: { SSH_KEY: { from_secret: 'SSH_KEY' }, ANDROID: 'android' },
commands: [
'VERBOSE=1 JOBS=' + jobs + ' NDK=/usr/lib/android-ndk ./contrib/android.sh',
'git clone https://github.com/oxen-io/lokinet-flutter-app lokinet-mobile',
'cp -av build-android/out/* lokinet-mobile/lokinet_lib/android/src/main/jniLibs/',
'cd lokinet-mobile',
'flutter build apk --debug',
'cd ..',
'cp lokinet-mobile/build/app/outputs/apk/debug/app-debug.apk lokinet.apk',
] + extra_cmds,
},
],
local apk_builder(name, image, extra_cmds=[], allow_fail=false) = {
kind: 'pipeline',
type: 'docker',
name: name,
platform: {arch: "amd64"},
trigger: { branch: { exclude: ['debian/*', 'ubuntu/*'] } },
steps: [
submodules,
{
name: 'build',
image: image,
[if allow_fail then "failure"]: "ignore",
environment: { SSH_KEY: { from_secret: "SSH_KEY" }, ANDROID: "android" },
commands: [
"cd android",
"rm -f local.properties",
"echo 'sdk.dir=/usr/lib/android-sdk' >> local.properties",
"echo 'ndk.dir=/usr/lib/android-ndk' >> local.properties",
"GRADLE_USER_HOME=/cache/gradle/${DRONE_STAGE_MACHINE} gradle --no-daemon assembleDebug",
] + extra_cmds
}
]
};
// windows cross compile on debian
local windows_cross_pipeline(name,
image,
gui_image=docker_base + 'nodejs-lts',
arch='amd64',
build_type='Release',
lto=false,
werror=false,
cmake_extra='',
local_mirror=true,
extra_cmds=[],
jobs=6,
allow_fail=false) = {
kind: 'pipeline',
type: 'docker',
name: name,
platform: { arch: arch },
trigger: { branch: { exclude: ['debian/*', 'ubuntu/*'] } },
steps: [
submodules,
{
name: 'GUI',
image: gui_image,
pull: 'always',
[if allow_fail then 'failure']: 'ignore',
commands: [
'echo "Building on ${DRONE_STAGE_MACHINE}"',
'echo "man-db man-db/auto-update boolean false" | debconf-set-selections',
apt_get_quiet + ' update',
apt_get_quiet + ' install -y eatmydata',
'eatmydata ' + apt_get_quiet + ' install --no-install-recommends -y p7zip-full wine',
'cd gui',
'yarn install --frozen-lockfile',
'USE_SYSTEM_7ZA=true DISPLAY= WINEDEBUG=-all yarn win32',
],
},
{
name: 'build',
image: image,
pull: 'always',
[if allow_fail then 'failure']: 'ignore',
environment: { SSH_KEY: { from_secret: 'SSH_KEY' }, WINDOWS_BUILD_NAME: 'x64' },
commands: [
'echo "Building on ${DRONE_STAGE_MACHINE}"',
'echo "man-db man-db/auto-update boolean false" | debconf-set-selections',
apt_get_quiet + ' update',
apt_get_quiet + ' install -y eatmydata',
'eatmydata ' + apt_get_quiet + ' install --no-install-recommends -y build-essential cmake git pkg-config ccache g++-mingw-w64-x86-64-posix nsis zip icoutils automake libtool librsvg2-bin bison',
'JOBS=' + jobs + ' VERBOSE=1 ./contrib/windows.sh -DSTRIP_SYMBOLS=ON -DGUI_EXE=$${DRONE_WORKSPACE}/gui/release/Lokinet-GUI_portable.exe' +
ci_dep_mirror(local_mirror),
] + extra_cmds,
},
],
};
// linux cross compile on debian
local linux_cross_pipeline(name,
cross_targets,
arch='amd64',
build_type='Release',
cmake_extra='',
local_mirror=true,
extra_cmds=[],
jobs=6,
allow_fail=false) = {
kind: 'pipeline',
type: 'docker',
name: name,
platform: { arch: arch },
trigger: { branch: { exclude: ['debian/*', 'ubuntu/*'] } },
steps: [
submodules,
{
name: 'build',
image: docker_base + 'debian-stable-cross',
pull: 'always',
[if allow_fail then 'failure']: 'ignore',
environment: { SSH_KEY: { from_secret: 'SSH_KEY' }, CROSS_TARGETS: std.join(':', cross_targets) },
commands: [
'echo "Building on ${DRONE_STAGE_MACHINE}"',
'VERBOSE=1 JOBS=' + jobs + ' ./contrib/cross.sh ' + std.join(' ', cross_targets) +
' -- ' + cmake_extra + ci_dep_mirror(local_mirror),
],
},
],
local windows_cross_pipeline(name, image,
arch='amd64',
build_type='Release',
lto=false,
werror=false,
cmake_extra='',
toolchain='32',
extra_cmds=[],
jobs=6,
allow_fail=false) = {
kind: 'pipeline',
type: 'docker',
name: name,
platform: { arch: arch },
trigger: { branch: { exclude: ['debian/*', 'ubuntu/*'] } },
steps: [
submodules,
{
name: 'build',
image: image,
[if allow_fail then "failure"]: "ignore",
environment: { SSH_KEY: { from_secret: "SSH_KEY" }, WINDOWS_BUILD_NAME: toolchain+"bit" },
commands: [
'echo "Building on ${DRONE_STAGE_MACHINE}"',
'echo "man-db man-db/auto-update boolean false" | debconf-set-selections',
apt_get_quiet + ' update',
apt_get_quiet + ' install -y eatmydata',
'eatmydata ' + apt_get_quiet + ' install -y build-essential cmake git ninja-build pkg-config ccache g++-mingw-w64-x86-64-posix nsis zip automake libtool',
'update-alternatives --set x86_64-w64-mingw32-gcc /usr/bin/x86_64-w64-mingw32-gcc-posix',
'update-alternatives --set x86_64-w64-mingw32-g++ /usr/bin/x86_64-w64-mingw32-g++-posix',
'mkdir build',
'cd build',
'cmake .. -G Ninja -DCMAKE_EXE_LINKER_FLAGS=-fstack-protector -DCMAKE_CXX_FLAGS=-fdiagnostics-color=always -DCMAKE_TOOLCHAIN_FILE=../contrib/cross/mingw'+toolchain+'.cmake -DCMAKE_BUILD_TYPE='+build_type+' ' +
(if werror then '-DWARNINGS_AS_ERRORS=ON ' else '') +
(if lto then '' else '-DWITH_LTO=OFF ') +
"-DBUILD_STATIC_DEPS=ON -DDOWNLOAD_SODIUM=ON -DBUILD_PACKAGE=ON -DBUILD_SHARED_LIBS=OFF -DBUILD_TESTING=OFF -DNATIVE_BUILD=OFF -DSTATIC_LINK=ON" +
cmake_extra,
'ninja -j' + jobs + ' -v package',
] + extra_cmds,
}
],
};
// Builds a snapshot .deb on a debian-like system by merging into the debian/* or ubuntu/* branch
local deb_builder(image, distro, distro_branch, arch='amd64', oxen_repo=true) = {
kind: 'pipeline',
type: 'docker',
name: 'DEB (' + distro + (if arch == 'amd64' then '' else '/' + arch) + ')',
platform: { arch: arch },
environment: { distro_branch: distro_branch, distro: distro },
steps: [
submodules,
{
name: 'build',
image: image,
pull: 'always',
failure: 'ignore',
environment: { SSH_KEY: { from_secret: 'SSH_KEY' } },
commands: [
'echo "Building on ${DRONE_STAGE_MACHINE}"',
'echo "man-db man-db/auto-update boolean false" | debconf-set-selections',
] + (if oxen_repo then [
'cp contrib/deb.oxen.io.gpg /etc/apt/trusted.gpg.d',
'echo deb http://deb.oxen.io $${distro} main >/etc/apt/sources.list.d/oxen.list',
] else []) + [
apt_get_quiet + ' update',
apt_get_quiet + ' install -y eatmydata',
'eatmydata ' + apt_get_quiet + ' install --no-install-recommends -y git devscripts equivs ccache git-buildpackage python3-dev',
|||
# Look for the debian branch in this repo first, try upstream if that fails.
if ! git checkout $${distro_branch}; then
git remote add --fetch upstream https://github.com/oxen-io/lokinet.git &&
git checkout $${distro_branch}
fi
|||,
// Tell the merge how to resolve conflicts in the source .drone.jsonnet (we don't
// care about it at all since *this* .drone.jsonnet is already loaded).
'git config merge.ours.driver true',
'echo .drone.jsonnet merge=ours >>.gitattributes',
local deb_builder(image, distro, distro_branch, arch='amd64', loki_repo=true) = {
kind: 'pipeline',
type: 'docker',
name: 'DEB (' + distro + (if arch == 'amd64' then '' else '/' + arch) + ')',
platform: { arch: arch },
environment: { distro_branch: distro_branch, distro: distro },
steps: [
submodules,
{
name: 'build',
image: image,
failure: 'ignore',
environment: { SSH_KEY: { from_secret: "SSH_KEY" } },
commands: [
'echo "Building on ${DRONE_STAGE_MACHINE}"',
'echo "man-db man-db/auto-update boolean false" | debconf-set-selections'
] + (if loki_repo then [
'cp contrib/deb.loki.network.gpg /etc/apt/trusted.gpg.d',
'echo deb http://deb.loki.network $${distro} main >/etc/apt/sources.list.d/loki.network.list'
] else []) + [
apt_get_quiet + ' update',
apt_get_quiet + ' install -y eatmydata',
'eatmydata ' + apt_get_quiet + ' install -y git devscripts equivs ccache git-buildpackage python3-dev',
|||
# Look for the debian branch in this repo first, try upstream if that fails.
if ! git checkout $${distro_branch}; then
git remote add --fetch upstream https://github.com/oxen-io/loki-network.git &&
git checkout $${distro_branch}
fi
|||,
# Tell the merge how to resolve conflicts in the source .drone.jsonnet (we don't
# care about it at all since *this* .drone.jsonnet is already loaded).
'git config merge.ours.driver true',
'echo .drone.jsonnet merge=ours >>.gitattributes',
'git merge ${DRONE_COMMIT}',
'export DEBEMAIL="${DRONE_COMMIT_AUTHOR_EMAIL}" DEBFULLNAME="${DRONE_COMMIT_AUTHOR_NAME}"',
'gbp dch -S -s "HEAD^" --spawn-editor=never -U low',
'eatmydata mk-build-deps --install --remove --tool "' + apt_get_quiet + ' -o Debug::pkgProblemResolver=yes --no-install-recommends -y"',
'export DEB_BUILD_OPTIONS="parallel=$$(nproc)"',
//'grep -q lib debian/lokinet-bin.install || echo "/usr/lib/lib*.so*" >>debian/lokinet-bin.install',
'debuild -e CCACHE_DIR -b',
'./contrib/ci/drone-debs-upload.sh ' + distro,
],
},
],
'git merge ${DRONE_COMMIT}',
'export DEBEMAIL="${DRONE_COMMIT_AUTHOR_EMAIL}" DEBFULLNAME="${DRONE_COMMIT_AUTHOR_NAME}"',
'gbp dch -S -s "HEAD^" --spawn-editor=never -U low',
'eatmydata mk-build-deps --install --remove --tool "' + apt_get_quiet + ' -o Debug::pkgProblemResolver=yes --no-install-recommends -y"',
'export DEB_BUILD_OPTIONS="parallel=$$(nproc)"',
#'grep -q lib debian/lokinet-bin.install || echo "/usr/lib/lib*.so*" >>debian/lokinet-bin.install',
'debuild -e CCACHE_DIR -b',
'./contrib/ci/drone-debs-upload.sh ' + distro,
]
}
]
};
local clang(version) = debian_pipeline(
'Debian sid/clang-' + version + ' (amd64)',
docker_base + 'debian-sid-clang',
deps=['clang-' + version] + default_deps_nocxx,
cmake_extra='-DCMAKE_C_COMPILER=clang-' + version + ' -DCMAKE_CXX_COMPILER=clang++-' + version + ' '
);
local full_llvm(version) = debian_pipeline(
'Debian sid/llvm-' + version + ' (amd64)',
docker_base + 'debian-sid-clang',
deps=['clang-' + version, ' lld-' + version, ' libc++-' + version + '-dev', 'libc++abi-' + version + '-dev']
+ default_deps_nocxx,
cmake_extra='-DCMAKE_C_COMPILER=clang-' + version +
' -DCMAKE_CXX_COMPILER=clang++-' + version +
' -DCMAKE_CXX_FLAGS=-stdlib=libc++ ' +
std.join(' ', [
'-DCMAKE_' + type + '_LINKER_FLAGS=-fuse-ld=lld-' + version
for type in ['EXE', 'MODULE', 'SHARED']
])
);
// Macos build
local mac_builder(name,
build_type='Release',
werror=true,
cmake_extra='',
local_mirror=true,
extra_cmds=[],
jobs=6,
codesign='-DCODESIGN=OFF',
allow_fail=false) = {
kind: 'pipeline',
type: 'exec',
name: name,
platform: { os: 'darwin', arch: 'amd64' },
steps: [
{ name: 'submodules', commands: submodule_commands },
{
name: 'build',
environment: { SSH_KEY: { from_secret: 'SSH_KEY' } },
commands: [
'echo "Building on ${DRONE_STAGE_MACHINE}"',
// If you don't do this then the C compiler doesn't have an include path containing
// basic system headers. WTF apple:
'export SDKROOT="$(xcrun --sdk macosx --show-sdk-path)"',
'ulimit -n 1024', // because macos sets ulimit to 256 for some reason yeah idk
'./contrib/mac-configure.sh ' +
ci_dep_mirror(local_mirror) +
(if build_type == 'Debug' then ' -DWARN_DEPRECATED=OFF ' else '') +
codesign,
'cd build-mac',
// We can't use the 'package' target here because making a .dmg requires an active logged in
// macos gui to invoke Finder to invoke the partitioning tool to create a partitioned (!)
// disk image. Most likely the GUI is required because if you lose sight of how pretty the
// surface of macOS is you might see how ugly the insides are.
'ninja -j' + jobs + ' assemble_gui',
'cd ..',
] + extra_cmds,
},
],
};
local docs_pipeline(name, image, extra_cmds=[], allow_fail=false) = {
kind: 'pipeline',
type: 'docker',
name: name,
platform: { arch: 'amd64' },
trigger: { branch: { exclude: ['debian/*', 'ubuntu/*'] } },
steps: [
submodules,
{
name: 'build',
image: image,
pull: 'always',
[if allow_fail then 'failure']: 'ignore',
environment: { SSH_KEY: { from_secret: 'SSH_KEY' } },
commands: [
'cmake -S . -B build-docs',
'make -C build-docs doc',
] + extra_cmds,
},
],
build_type='Release',
werror=true,
cmake_extra='',
extra_cmds=[],
jobs=6,
allow_fail=false) = {
kind: 'pipeline',
type: 'exec',
name: name,
platform: { os: 'darwin', arch: 'amd64' },
steps: [
{ name: 'submodules', commands: ['git fetch --tags', 'git submodule update --init --recursive'] },
{
name: 'build',
environment: { SSH_KEY: { from_secret: "SSH_KEY" } },
commands: [
'echo "Building on ${DRONE_STAGE_MACHINE}"',
// If you don't do this then the C compiler doesn't have an include path containing
// basic system headers. WTF apple:
'export SDKROOT="$(xcrun --sdk macosx --show-sdk-path)"',
'ulimit -n 1024', // because macos sets ulimit to 256 for some reason yeah idk
'mkdir build',
'cd build',
'cmake .. -G Ninja -DCMAKE_CXX_FLAGS=-fcolor-diagnostics -DCMAKE_BUILD_TYPE='+build_type+' ' +
(if werror then '-DWARNINGS_AS_ERRORS=ON ' else '') + cmake_extra,
'ninja -j' + jobs + ' -v',
'./test/testAll --use-colour yes',
] + extra_cmds,
}
]
};
[
{
name: 'lint check',
kind: 'pipeline',
type: 'docker',
steps: [{
name: 'build',
image: docker_base + 'lint',
pull: 'always',
commands: [
'echo "Building on ${DRONE_STAGE_MACHINE}"',
apt_get_quiet + ' update',
apt_get_quiet + ' install -y eatmydata',
'eatmydata ' + apt_get_quiet + ' install --no-install-recommends -y git clang-format-14 jsonnet',
'./contrib/ci/drone-format-verify.sh',
],
}],
},
// documentation builder
docs_pipeline('Documentation',
docker_base + 'docbuilder',
extra_cmds=['UPLOAD_OS=docs ./contrib/ci/drone-static-upload.sh']),
// Various debian builds
debian_pipeline('Debian sid (amd64)', docker_base + 'debian-sid'),
debian_pipeline('Debian sid/Debug (amd64)', docker_base + 'debian-sid', build_type='Debug'),
clang(13),
full_llvm(13),
debian_pipeline('Debian stable (i386)', docker_base + 'debian-stable/i386'),
debian_pipeline('Debian buster (amd64)', docker_base + 'debian-buster', cmake_extra='-DDOWNLOAD_SODIUM=ON'),
debian_pipeline('Ubuntu latest (amd64)', docker_base + 'ubuntu-rolling'),
debian_pipeline('Ubuntu LTS (amd64)', docker_base + 'ubuntu-lts'),
debian_pipeline('Ubuntu bionic (amd64)',
docker_base + 'ubuntu-bionic',
deps=['g++-8'] + default_deps_nocxx,
cmake_extra='-DCMAKE_C_COMPILER=gcc-8 -DCMAKE_CXX_COMPILER=g++-8',
oxen_repo=true),
// ARM builds (ARM64 and armhf)
debian_pipeline('Debian sid (ARM64)', docker_base + 'debian-sid', arch='arm64', jobs=4),
debian_pipeline('Debian stable (armhf)', docker_base + 'debian-stable/arm32v7', arch='arm64', jobs=4),
// cross compile targets
// Aug 11: these are exhibiting some dumb failures in libsodium and external deps, TOFIX later
//linux_cross_pipeline('Cross Compile (arm/arm64)', cross_targets=['arm-linux-gnueabihf', 'aarch64-linux-gnu']),
//linux_cross_pipeline('Cross Compile (ppc64le)', cross_targets=['powerpc64le-linux-gnu']),
// Not currently building successfully:
//linux_cross_pipeline('Cross Compile (mips)', cross_targets=['mips-linux-gnu', 'mipsel-linux-gnu']),
{
name: 'lint check',
kind: 'pipeline',
type: 'docker',
steps: [{
name: 'build', image: 'registry.oxen.rocks/lokinet-ci-lint',
commands: [
'echo "Building on ${DRONE_STAGE_MACHINE}"',
apt_get_quiet + ' update',
apt_get_quiet + ' install -y eatmydata',
'eatmydata ' + apt_get_quiet + ' install -y git clang-format-11',
'./contrib/ci/drone-format-verify.sh']
}]
},
// android apk builder
// Aug 11: this is also failing in openssl, TOFIX later
//apk_builder('android apk', docker_base + 'flutter', extra_cmds=['UPLOAD_OS=android ./contrib/ci/drone-static-upload.sh']),
// Various debian builds
debian_pipeline("Debian sid (amd64)", "debian:sid"),
debian_pipeline("Debian sid/Debug (amd64)", "debian:sid", build_type='Debug'),
debian_pipeline("Debian sid/clang-11 (amd64)", docker_base+'debian-sid', deps='clang-11 '+default_deps_nocxx,
cmake_extra='-DCMAKE_C_COMPILER=clang-11 -DCMAKE_CXX_COMPILER=clang++-11 '),
debian_pipeline("Debian buster (i386)", "i386/debian:buster", cmake_extra='-DDOWNLOAD_SODIUM=ON'),
debian_pipeline("Ubuntu focal (amd64)", docker_base+'ubuntu-focal'),
debian_pipeline("Ubuntu bionic (amd64)", "ubuntu:bionic", deps='g++-8 ' + default_deps_nocxx,
cmake_extra='-DCMAKE_C_COMPILER=gcc-8 -DCMAKE_CXX_COMPILER=g++-8', loki_repo=true),
// Windows builds (x64)
windows_cross_pipeline('Windows (amd64)',
docker_base + 'debian-bookworm',
extra_cmds=[
'./contrib/ci/drone-static-upload.sh',
]),
// ARM builds (ARM64 and armhf)
debian_pipeline("Debian sid (ARM64)", "debian:sid", arch="arm64", jobs=4),
debian_pipeline("Debian buster (armhf)", "arm32v7/debian:buster", arch="arm64", cmake_extra='-DDOWNLOAD_SODIUM=ON', jobs=4),
// Static armhf build (gets uploaded)
debian_pipeline("Static (buster armhf)", "arm32v7/debian:buster", arch="arm64", deps='g++ python3-dev automake libtool',
cmake_extra='-DBUILD_STATIC_DEPS=ON -DBUILD_SHARED_LIBS=OFF -DSTATIC_LINK=ON ' +
'-DCMAKE_CXX_FLAGS="-march=armv7-a+fp" -DCMAKE_C_FLAGS="-march=armv7-a+fp" -DNATIVE_BUILD=OFF ' +
'-DWITH_SYSTEMD=OFF',
extra_cmds=[
'../contrib/ci/drone-check-static-libs.sh',
'UPLOAD_OS=linux-armhf ../contrib/ci/drone-static-upload.sh'
],
jobs=4),
// android apk builder
apk_builder("android apk", "registry.oxen.rocks/lokinet-ci-android", extra_cmds=['UPLOAD_OS=anrdoid ../contrib/ci/drone-static-upload.sh']),
// Windows builds (x64)
windows_cross_pipeline("Windows (amd64)", docker_base+'debian-win32-cross',
toolchain='64', extra_cmds=[
'../contrib/ci/drone-static-upload.sh'
]),
// Static build (on bionic) which gets uploaded to builds.lokinet.dev:
debian_pipeline('Static (bionic amd64)',
docker_base + 'ubuntu-bionic',
deps=['g++-8', 'python3-dev', 'automake', 'libtool'],
lto=true,
tests=false,
oxen_repo=true,
cmake_extra='-DBUILD_STATIC_DEPS=ON -DBUILD_SHARED_LIBS=OFF -DSTATIC_LINK=ON ' +
'-DCMAKE_C_COMPILER=gcc-8 -DCMAKE_CXX_COMPILER=g++-8 ' +
'-DCMAKE_CXX_FLAGS="-march=x86-64 -mtune=haswell" ' +
'-DCMAKE_C_FLAGS="-march=x86-64 -mtune=haswell" ' +
'-DNATIVE_BUILD=OFF -DWITH_SYSTEMD=OFF -DWITH_BOOTSTRAP=OFF -DBUILD_LIBLOKINET=OFF',
extra_cmds=[
'./contrib/ci/drone-check-static-libs.sh',
'./contrib/ci/drone-static-upload.sh',
]),
// Static armhf build (gets uploaded)
debian_pipeline('Static (buster armhf)',
docker_base + 'debian-buster/arm32v7',
arch='arm64',
deps=['g++', 'python3-dev', 'automake', 'libtool'],
cmake_extra='-DBUILD_STATIC_DEPS=ON -DBUILD_SHARED_LIBS=OFF -DSTATIC_LINK=ON ' +
'-DCMAKE_CXX_FLAGS="-march=armv7-a+fp -Wno-psabi" -DCMAKE_C_FLAGS="-march=armv7-a+fp" ' +
'-DNATIVE_BUILD=OFF -DWITH_SYSTEMD=OFF -DWITH_BOOTSTRAP=OFF',
extra_cmds=[
'./contrib/ci/drone-check-static-libs.sh',
'UPLOAD_OS=linux-armhf ./contrib/ci/drone-static-upload.sh',
],
jobs=4),
// Static build (on bionic) which gets uploaded to builds.lokinet.dev:
debian_pipeline("Static (bionic amd64)", docker_base+'ubuntu-bionic', deps='g++-8 python3-dev automake libtool', lto=true,
cmake_extra='-DBUILD_STATIC_DEPS=ON -DBUILD_SHARED_LIBS=OFF -DSTATIC_LINK=ON -DCMAKE_C_COMPILER=gcc-8 -DCMAKE_CXX_COMPILER=g++-8 ' +
'-DCMAKE_CXX_FLAGS="-march=x86-64 -mtune=haswell" -DCMAKE_C_FLAGS="-march=x86-64 -mtune=haswell" -DNATIVE_BUILD=OFF ' +
'-DWITH_SYSTEMD=OFF',
extra_cmds=[
'../contrib/ci/drone-check-static-libs.sh',
'../contrib/ci/drone-static-upload.sh'
]),
// integration tests
debian_pipeline('Router Hive',
docker_base + 'ubuntu-lts',
deps=['python3-dev', 'python3-pytest', 'python3-pybind11'] + default_deps,
cmake_extra='-DWITH_HIVE=ON'),
// integration tests
debian_pipeline("Router Hive", "ubuntu:focal", deps='python3-dev python3-pytest python3-pybind11 ' + default_deps,
cmake_extra='-DWITH_HIVE=ON'),
// Deb builds:
deb_builder(docker_base + 'debian-sid-builder', 'sid', 'debian/sid'),
deb_builder(docker_base + 'debian-bullseye-builder', 'bullseye', 'debian/bullseye'),
deb_builder(docker_base + 'ubuntu-jammy-builder', 'jammy', 'ubuntu/jammy'),
deb_builder(docker_base + 'debian-sid-builder', 'sid', 'debian/sid', arch='arm64'),
// Deb builds:
deb_builder("debian:sid", "sid", "debian/sid"),
deb_builder("debian:buster", "buster", "debian/buster"),
deb_builder("ubuntu:focal", "focal", "ubuntu/focal"),
deb_builder("debian:sid", "sid", "debian/sid", arch='arm64'),
// Macos builds:
mac_builder('macOS (Release)', extra_cmds=[
'./contrib/ci/drone-check-static-libs.sh',
'./contrib/ci/drone-static-upload.sh',
]),
mac_builder('macOS (Debug)', build_type='Debug'),
// Macos builds:
mac_builder('macOS (Release)'),
mac_builder('macOS (Debug)', build_type='Debug'),
mac_builder('macOS (Static)', cmake_extra='-DBUILD_STATIC_DEPS=ON -DBUILD_SHARED_LIBS=OFF -DSTATIC_LINK=ON -DDOWNLOAD_SODIUM=FORCE -DDOWNLOAD_CURL=FORCE -DDOWNLOAD_UV=FORCE',
extra_cmds=[
'../contrib/ci/drone-check-static-libs.sh',
'../contrib/ci/drone-static-upload.sh'
]),
]
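Either version of this file evaluates to a plain JSON array of Drone pipeline objects, so it can be sanity-checked locally with the standalone `jsonnet` tool that the lint pipeline installs; a minimal sketch:

```
# Evaluate the Drone config; any jsonnet syntax error fails here before CI runs.
jsonnet .drone.jsonnet > drone-rendered.json
head -n 40 drone-rendered.json
```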

.gitattributes

@@ -2,4 +2,3 @@ external/date/test export-ignore
external/nlohmann/doc export-ignore
external/nlohmann/test export-ignore
external/nlohmann/benchmarks/data export-ignore
*.signed binary

@@ -1,22 +0,0 @@
name: Close incomplete issues
on:
schedule:
- cron: "30 1 * * *"
jobs:
close-issues:
runs-on: ubuntu-latest
permissions:
issues: write
steps:
- uses: actions/stale@v4.1.1
with:
only-labels: incomplete
days-before-issue-stale: 14
days-before-issue-close: 7
stale-issue-label: "stale"
stale-issue-message: "This issue is stale because it has been 'incomplete' for 14 days with no activity."
close-issue-message: "This issue was closed because it has been inactive for 7 days since being marked as stale."
days-before-pr-stale: -1
days-before-pr-close: -1
repo-token: ${{ secrets.GITHUB_TOKEN }}

.gitignore

@@ -26,8 +26,6 @@ callgrind.*
*.sig
*.signed
!/contrib/bootstrap/mainnet.signed
!/contrib/bootstrap/testnet.signed
*.key
shadow.data

.gitmodules

@@ -1,16 +1,25 @@
[submodule "external/nlohmann"]
path = external/nlohmann
url = https://github.com/nlohmann/json.git
[submodule "external/cxxopts"]
path = external/cxxopts
url = https://github.com/jarro2783/cxxopts.git
[submodule "external/ghc-filesystem"]
path = external/ghc-filesystem
url = https://github.com/gulrak/filesystem.git
[submodule "test/Catch2"]
path = test/Catch2
url = https://github.com/catchorg/Catch2
[submodule "external/date"]
path = external/date
url = https://github.com/HowardHinnant/date.git
[submodule "external/pybind11"]
path = external/pybind11
url = https://github.com/pybind/pybind11
branch = stable
[submodule "external/clang-format-hooks"]
path = external/clang-format-hooks
url = https://github.com/barisione/clang-format-hooks/
[submodule "external/sqlite_orm"]
path = external/sqlite_orm
url = https://github.com/fnc12/sqlite_orm
@@ -26,16 +35,3 @@
[submodule "external/ngtcp2"]
path = external/ngtcp2
url = https://github.com/ngtcp2/ngtcp2.git
branch = v0.1.0
[submodule "external/oxen-encoding"]
path = external/oxen-encoding
url = https://github.com/oxen-io/oxen-encoding.git
[submodule "external/oxen-logging"]
path = external/oxen-logging
url = https://github.com/oxen-io/oxen-logging.git
[submodule "gui"]
path = gui
url = https://github.com/oxen-io/lokinet-gui.git
[submodule "external/CLI11"]
path = external/CLI11
url = https://github.com/CLIUtils/CLI11.git

@@ -1,21 +1,13 @@
cmake_minimum_required(VERSION 3.13...3.24) # 3.13 is buster's version
cmake_minimum_required(VERSION 3.10) # bionic's cmake version
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
# Has to be set before `project()`, and ignored on non-macos:
set(CMAKE_OSX_DEPLOYMENT_TARGET 10.15 CACHE STRING "macOS deployment target (Apple clang only)")
option(BUILD_DAEMON "build lokinet daemon and associated utils" ON)
set(LANGS C CXX)
if(APPLE AND BUILD_DAEMON)
set(LANGS ${LANGS} OBJC Swift)
endif()
set(CMAKE_OSX_DEPLOYMENT_TARGET 10.12 CACHE STRING "macOS deployment target (Apple clang only)")
find_program(CCACHE_PROGRAM ccache)
if(CCACHE_PROGRAM)
foreach(lang ${LANGS})
foreach(lang C CXX)
if(NOT DEFINED CMAKE_${lang}_COMPILER_LAUNCHER AND NOT CMAKE_${lang}_COMPILER MATCHES ".*/ccache")
message(STATUS "Enabling ccache for ${lang}")
set(CMAKE_${lang}_COMPILER_LAUNCHER ${CCACHE_PROGRAM} CACHE STRING "")
@@ -23,53 +15,44 @@ if(CCACHE_PROGRAM)
endforeach()
endif()
project(lokinet
VERSION 0.9.11
VERSION 0.9.3
DESCRIPTION "lokinet - IP packet onion router"
LANGUAGES ${LANGS})
LANGUAGES C CXX)
if(APPLE)
# Apple build number: must be incremented to submit a new build for the same lokinet version,
# should be reset to 0 when the lokinet version increments.
set(LOKINET_APPLE_BUILD 5)
set(RELEASE_MOTTO "A Series of Tubes" CACHE STRING "Release motto")
add_definitions(-DLLARP_VERSION_MAJOR=${lokinet_VERSION_MAJOR})
add_definitions(-DLLARP_VERSION_MINOR=${lokinet_VERSION_MINOR})
add_definitions(-DLLARP_VERSION_PATCH=${lokinet_VERSION_PATCH})
if(RELEASE_MOTTO AND CMAKE_BUILD_TYPE MATCHES "[Rr][Ee][Ll][Ee][Aa][Ss][Ee]")
add_definitions(-DLLARP_RELEASE_MOTTO="${RELEASE_MOTTO}")
endif()
set(RELEASE_MOTTO "Our Lord And Savior" CACHE STRING "Release motto")
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/cmake")
set(DEFAULT_WITH_BOOTSTRAP ON)
if(APPLE)
set(DEFAULT_WITH_BOOTSTRAP OFF)
endif()
# Core options
option(USE_AVX2 "enable avx2 code" OFF)
option(USE_NETNS "enable networking namespace support. Linux only" OFF)
option(NATIVE_BUILD "optimise for host system and FPU" ON)
option(WITH_EMBEDDED_LOKINET "build liblokinet.so for embedded lokinet" OFF)
option(EMBEDDED_CFG "optimise for older hardware or embedded systems" OFF)
option(BUILD_LIBLOKINET "build liblokinet.so" ON)
option(SHADOW "use shadow testing framework. linux only" OFF)
option(XSAN "use sanitiser, if your system has it (requires -DCMAKE_BUILD_TYPE=Debug)" OFF)
option(USE_JEMALLOC "Link to jemalloc for memory allocations, if found" ON)
option(WITH_JEMALLOC "use jemalloc as allocator" OFF)
option(TESTNET "testnet build" OFF)
option(WITH_COVERAGE "generate coverage data" OFF)
option(USE_SHELLHOOKS "enable shell hooks on compile time (dangerous)" OFF)
option(WARNINGS_AS_ERRORS "treat all warnings as errors. turn off for development, on for release" OFF)
option(WITH_TESTS "build unit tests" OFF)
option(TRACY_ROOT "include tracy profiler source" OFF)
option(WITH_TESTS "build unit tests" ON)
option(WITH_HIVE "build simulation stubs" OFF)
option(BUILD_PACKAGE "builds extra components for making an installer (with 'make package')" OFF)
option(WITH_BOOTSTRAP "build lokinet-bootstrap tool" ${DEFAULT_WITH_BOOTSTRAP})
option(WITH_PEERSTATS "build with experimental peerstats db support" OFF)
option(STRIP_SYMBOLS "strip off all debug symbols into an external archive for all executables built" OFF)
set(BOOTSTRAP_FALLBACK_MAINNET "${PROJECT_SOURCE_DIR}/contrib/bootstrap/mainnet.signed" CACHE PATH "Fallback bootstrap path (mainnet)")
set(BOOTSTRAP_FALLBACK_TESTNET "${PROJECT_SOURCE_DIR}/contrib/bootstrap/testnet.signed" CACHE PATH "Fallback bootstrap path (testnet)")
include(cmake/enable_lto.cmake)
option(CROSS_PLATFORM "cross compiler platform" "Linux")
option(CROSS_PREFIX "toolchain cross compiler prefix" "")
option(BUILD_STATIC_DEPS "Download, build, and statically link against core dependencies" OFF)
option(STATIC_LINK "link statically against dependencies" ${BUILD_STATIC_DEPS})
if(BUILD_STATIC_DEPS AND NOT STATIC_LINK)
@@ -84,19 +67,6 @@ if(NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE RelWithDebInfo)
endif()
set(debug OFF)
if(CMAKE_BUILD_TYPE MATCHES "[Dd][Ee][Bb][Uu][Gg]")
set(debug ON)
add_definitions(-DLOKINET_DEBUG)
endif()
option(WARN_DEPRECATED "show deprecation warnings" ${debug})
if(BUILD_STATIC_DEPS AND STATIC_LINK)
message(STATUS "we are building static deps so we won't build shared libs")
set(BUILD_SHARED_LIBS OFF CACHE BOOL "")
endif()
include(CheckCXXSourceCompiles)
include(CheckLibraryExists)
set(CMAKE_CXX_STANDARD 17)
@@ -108,18 +78,27 @@ set(CMAKE_C_EXTENSIONS OFF)
include(cmake/target_link_libraries_system.cmake)
include(cmake/add_import_library.cmake)
include(cmake/add_log_tag.cmake)
include(cmake/libatomic.cmake)
include(cmake/link_dep_libs.cmake)
if (STATIC_LINK)
set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_STATIC_LIBRARY_SUFFIX})
message(STATUS "setting static library suffix search")
endif()
include(cmake/gui-option.cmake)
add_definitions(-D${CMAKE_SYSTEM_NAME})
if(MSVC_VERSION)
enable_language(ASM_MASM)
list(APPEND CMAKE_ASM_MASM_SOURCE_FILE_EXTENSIONS s)
add_definitions(-D_WIN32_WINNT=0x0600 -DNOMINMAX -DSODIUM_STATIC)
else()
enable_language(ASM)
endif()
include(cmake/solaris.cmake)
include(cmake/win32.cmake)
include(cmake/macos.cmake)
# No in-source building
include(MacroEnsureOutOfSourceBuild)
@@ -148,6 +127,7 @@ endif()
find_package(PkgConfig REQUIRED)
if(NOT BUILD_STATIC_DEPS)
pkg_check_modules(LIBUV libuv>=1.18.0 IMPORTED_TARGET)
endif()
@@ -156,7 +136,7 @@ if(LIBUV_FOUND AND NOT BUILD_STATIC_DEPS)
target_link_libraries(libuv INTERFACE PkgConfig::LIBUV)
else()
if(NOT BUILD_STATIC_DEPS)
message(FATAL_ERROR "Could not find libuv >= 1.28.0; install it on your system or use -DBUILD_STATIC_DEPS=ON")
message(FATAL_ERROR "Could not find libu >= 1.28.0; install it on your system or use -DBUILD_STATIC_DEPS=ON")
endif()
endif()
@@ -183,19 +163,28 @@ if(NOT TARGET sodium)
export(TARGETS sodium NAMESPACE sodium:: FILE sodium-exports.cmake)
endif()
set(warning_flags -Wall -Wextra -Wno-unknown-pragmas -Wno-unused-function -Werror=vla)
if(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
list(APPEND warning_flags -Wno-unknown-warning-option)
option(FORCE_OXENMQ_SUBMODULE "force using oxenmq submodule" OFF)
if(NOT FORCE_OXENMQ_SUBMODULE)
pkg_check_modules(OXENMQ liboxenmq>=1.2.4)
endif()
if(WARN_DEPRECATED)
list(APPEND warning_flags -Wdeprecated-declarations)
if(OXENMQ_FOUND)
add_library(oxenmq INTERFACE)
link_dep_libs(oxenmq INTERFACE "${OXENMQ_LIBRARY_DIRS}" ${OXENMQ_LIBRARIES})
target_include_directories(oxenmq INTERFACE ${OXENMQ_INCLUDE_DIRS})
add_library(oxenmq::oxenmq ALIAS oxenmq)
message(STATUS "Found system liboxenmq ${OXENMQ_VERSION}")
else()
list(APPEND warning_flags -Wno-deprecated-declarations)
message(STATUS "using oxenmq submodule")
add_subdirectory(${CMAKE_SOURCE_DIR}/external/oxen-mq)
endif()
# If we blindly add these directly as compile_options then they get passed to swiftc on Apple and
# break, so we use a generator expression to set them only for C++/C/ObjC
add_compile_options("$<$<OR:$<COMPILE_LANGUAGE:CXX>,$<COMPILE_LANGUAGE:C>,$<COMPILE_LANGUAGE:OBJC>>:${warning_flags}>")
# this is messing with release builds
add_compile_options(-U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0)
if (NOT CMAKE_SYSTEM_NAME MATCHES "Linux" AND SHADOW)
message( FATAL_ERROR "shadow-framework is Linux only" )
endif()
if(XSAN)
string(APPEND CMAKE_CXX_FLAGS_DEBUG " -fsanitize=${XSAN} -fno-omit-frame-pointer -fno-sanitize-recover")
@@ -205,6 +194,31 @@ if(XSAN)
message(STATUS "Doing a ${XSAN} sanitizer build")
endif()
if(CMAKE_BUILD_TYPE MATCHES "[Dd][Ee][Bb][Uu][Gg]")
add_definitions(-DLOKINET_DEBUG=1)
endif()
if(WITH_SHELLHOOKS)
add_definitions(-DENABLE_SHELLHOOKS)
endif()
if(TRACY_ROOT)
include_directories(${TRACY_ROOT})
add_definitions(-DTRACY_ENABLE)
endif()
if (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
add_compile_options(-Wno-unknown-warning-option)
endif()
if (NOT MSVC_VERSION)
add_compile_options(-Wall -Wextra -Wno-unknown-pragmas)
# vla are evil
add_compile_options(-Werror=vla)
add_compile_options(-Wno-unused-function -Wno-deprecated-declarations)
endif()
include(cmake/coverage.cmake)
# these vars are set by the cmake toolchain spec
@@ -212,20 +226,18 @@ if (WOW64_CROSS_COMPILE OR WIN64_CROSS_COMPILE)
include(cmake/cross_compile.cmake)
endif()
if(NOT APPLE)
if(NATIVE_BUILD)
if(CMAKE_SYSTEM_PROCESSOR STREQUAL ppc64le)
add_compile_options(-mcpu=native -mtune=native)
else()
add_compile_options(-march=native -mtune=native)
endif()
elseif(NOT NON_PC_TARGET)
if (USE_AVX2)
add_compile_options(-march=haswell -mtune=haswell -mfpmath=sse)
else()
# Public binary releases
add_compile_options(-march=nocona -mtune=haswell -mfpmath=sse)
endif()
if(NATIVE_BUILD)
if(CMAKE_SYSTEM_PROCESSOR STREQUAL ppc64le)
add_compile_options(-mcpu=native -mtune=native)
else()
add_compile_options(-march=native -mtune=native)
endif()
elseif(NOT NON_PC_TARGET)
if (USE_AVX2)
add_compile_options(-march=haswell -mtune=haswell -mfpmath=sse)
else()
# Public binary releases
add_compile_options(-march=nocona -mtune=haswell -mfpmath=sse)
endif()
endif()
@@ -233,12 +245,22 @@ set(CMAKE_THREAD_PREFER_PTHREAD TRUE)
set(THREADS_PREFER_PTHREAD_FLAG TRUE)
find_package(Threads REQUIRED)
if(USE_NETNS)
add_definitions(-DNETNS=1)
else()
add_definitions(-DNETNS=0)
endif()
if(TESTNET)
add_definitions(-DTESTNET)
add_definitions(-DTESTNET=1)
# 5 times slower than realtime
# add_definitions(-DTESTNET_SPEED=5)
endif()
if(SHADOW)
include(cmake/shadow.cmake)
endif()
unset(GIT_VERSION)
unset(GIT_VERSION_REAL)
@@ -273,18 +295,7 @@ if(WITH_SYSTEMD AND (NOT ANDROID))
endif()
add_subdirectory(external)
if(USE_JEMALLOC AND NOT STATIC_LINK)
pkg_check_modules(JEMALLOC jemalloc IMPORTED_TARGET)
if(JEMALLOC_FOUND)
target_link_libraries(base_libs INTERFACE PkgConfig::JEMALLOC)
else()
message(STATUS "jemalloc not found, not linking to jemalloc")
endif()
else()
message(STATUS "jemalloc support disabled")
endif()
include_directories(SYSTEM external/sqlite_orm/include)
if(ANDROID)
target_link_libraries(base_libs INTERFACE log)
@@ -292,34 +303,34 @@ if(ANDROID)
set(ANDROID_PLATFORM_SRC android/ifaddrs.c)
endif()
if(TRACY_ROOT)
target_link_libraries(base_libs INTERFACE dl)
endif()
if(WITH_HIVE)
add_definitions(-DLOKINET_HIVE)
add_definitions(-DLOKINET_HIVE=1)
endif()
add_subdirectory(crypto)
add_subdirectory(llarp)
if(BUILD_DAEMON)
add_subdirectory(daemon)
endif()
add_subdirectory(daemon)
if(WITH_HIVE)
add_subdirectory(pybind)
endif()
if(WITH_TESTS OR WITH_HIVE)
add_subdirectory(test)
endif()
if(ANDROID)
add_subdirectory(jni)
endif()
add_subdirectory(docs)
include(cmake/gui.cmake)
if(APPLE)
macos_target_setup()
if (NOT SHADOW)
if(WITH_TESTS OR WITH_HIVE)
add_subdirectory(test)
endif()
if(ANDROID)
add_subdirectory(jni)
endif()
endif()
add_subdirectory(docs)
# uninstall target
if(NOT TARGET uninstall)
configure_file(
@@ -331,10 +342,7 @@ if(NOT TARGET uninstall)
COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake)
endif()
if(BUILD_PACKAGE AND NOT APPLE)
include(cmake/installer.cmake)
endif()
if(TARGET package)
add_dependencies(package assemble_gui)
if(BUILD_PACKAGE)
include(cmake/installer.cmake)
endif()
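The cache options declared above are the same ones the Drone pipelines pass to `cmake` on the command line; a rough local equivalent of the static amd64 pipeline looks like the following sketch (flag selection is illustrative, not exhaustive):

```
# Configure a static build with options defined in this CMakeLists.txt;
# BUILD_STATIC_DEPS=ON downloads and builds the core dependencies itself.
mkdir build && cd build
cmake .. -DCMAKE_BUILD_TYPE=Release \
  -DBUILD_STATIC_DEPS=ON -DBUILD_SHARED_LIBS=OFF -DSTATIC_LINK=ON \
  -DNATIVE_BUILD=OFF -DWITH_TESTS=OFF
make -j"$(nproc)"
```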

@@ -0,0 +1,28 @@
{
"configurations": [
{
"name": "x64-Debug",
"generator": "Ninja",
"configurationType": "Debug",
"inheritEnvironments": [ "msvc_x64_x64" ],
"buildRoot": "${env.USERPROFILE}\\CMakeBuilds\\${workspaceHash}\\build\\${name}",
"installRoot": "${env.USERPROFILE}\\CMakeBuilds\\${workspaceHash}\\install\\${name}",
"cmakeCommandArgs": "",
"buildCommandArgs": "-v",
"ctestCommandArgs": "",
"variables": []
},
{
"name": "x64-Release",
"generator": "Ninja",
"configurationType": "RelWithDebInfo",
"buildRoot": "${env.USERPROFILE}\\CMakeBuilds\\${workspaceHash}\\build\\${name}",
"installRoot": "${env.USERPROFILE}\\CMakeBuilds\\${workspaceHash}\\install\\${name}",
"cmakeCommandArgs": "",
"buildCommandArgs": "-v",
"ctestCommandArgs": "",
"inheritEnvironments": [ "msvc_x64_x64" ],
"variables": []
}
]
}

@@ -3,7 +3,7 @@
* Act like a responsible adult.
* RUN `./contrib/format.sh` BEFORE COMMITTING ALWAYS.
* RUN `make format` BEFORE COMMITTING ALWAYS.
# Do NOT

@@ -1,3 +1,16 @@
LokiNET is the reference implementation of LLARP (Low Latency Anonymous
Routing Protocol).
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Copyright (c) 2018-2020 The Loki Project
Copyright (c) 2018-2020 Jeff Becker
Windows NT port and portions Copyright (c) 2018-2020 Rick V.
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
@@ -671,4 +684,4 @@ into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.
<https://www.gnu.org/licenses/why-not-lgpl.html>.

android/.gitignore

@@ -0,0 +1,18 @@
gen
tests
bin
libs
log*
obj
.gradle
.idea
.externalNativeBuild
ant.properties
local.properties
build.sh
android.iml
build
gradle
gradlew
gradlew.bat
gradle.properties

@@ -0,0 +1,46 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="network.loki.lokinet"
android:installLocation="auto"
android:versionCode="1"
android:versionName="0.8.4">
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.INTERNET" /> <!-- normal perm, per https://developer.android.com/guide/topics/permissions/normal-permissions.html -->
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<!-- normal perm -->
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
<application
android:allowBackup="true"
android:icon="@drawable/icon"
android:label="@string/app_name"
android:theme="@android:style/Theme.Holo.Light.DarkActionBar"
>
<receiver android:name=".NetworkStateChangeReceiver">
<intent-filter>
<action android:name="android.net.conn.CONNECTIVITY_CHANGE" />
</intent-filter>
</receiver>
<activity
android:name=".LokiNetActivity"
android:label="@string/app_name">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<service android:name=".LokinetDaemon"
android:enabled="true"
android:exported="true"
android:permission="android.permission.BIND_VPN_SERVICE">
<intent-filter>
<action android:name="android.net.VpnService"/>
</intent-filter>
</service>
</application>
</manifest>

@@ -0,0 +1,81 @@
buildscript {
repositories {
mavenCentral()
jcenter()
google()
}
dependencies {
classpath 'com.android.tools.build:gradle:4.0.1'
}
}
apply plugin: 'com.android.application'
repositories {
jcenter()
maven {
url 'https://maven.google.com'
}
google()
}
android {
compileSdkVersion 28
defaultConfig {
applicationId "network.loki.lokinet"
targetSdkVersion 28
minSdkVersion 23
versionCode 1
versionName '0.8.4'
externalNativeBuild {
cmake {
targets "lokinet-android"
arguments "-DWITH_LTO=OFF", "-DCXXOPTS_BUILD_TESTS=OFF","-DWITH_TESTS=OFF", "-DCMAKE_CROSSCOMPILING=ON", "-DNATIVE_BUILD=OFF", "-DANDROID=ON", "-DANDROID_STL=c++_static", "-DBUILD_STATIC_DEPS=ON", "-DBUILD_SHARED_LIBS=OFF", "-DSTATIC_LINK=ON", "-DANDROID_ARM_MODE=arm", "-DFORCE_OXENMQ_SUBMODULE=ON", "-DBUILD_LIBLOKINET=OFF"
cppFlags "-std=c++17"
abiFilters 'x86', 'x86_64', 'armeabi-v7a', 'arm64-v8a'
// abiFilters 'armeabi-v7a'
// abiFilters 'arm64-v8a', 'x86_64', 'armeabi-v7a'
}
}
}
externalNativeBuild {
cmake {
path "../CMakeLists.txt"
}
}
sourceSets {
main {
manifest.srcFile 'AndroidManifest.xml'
java.srcDirs = ['src']
res.srcDirs = ['res']
jniLibs.srcDirs = ['libs']
assets.srcDirs = ['assets']
}
}
signingConfigs {
jeff {
storeFile file("jeff-apk.jks")
keyAlias "jeff-apk"
}
}
buildTypes {
release {
minifyEnabled true
//signingConfig signingConfigs.jeff
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-project.txt'
debuggable false
}
debug {
// jniDebuggable true
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}

@@ -0,0 +1,96 @@
<?xml version="1.0" encoding="UTF-8"?>
<project name="lokinet" default="help">
<!-- The local.properties file is created and updated by the 'android' tool.
It contains the path to the SDK. It should *NOT* be checked into
Version Control Systems. -->
<property file="local.properties" />
<!-- The ant.properties file can be created by you. It is only edited by the
'android' tool to add properties to it.
This is the place to change some Ant specific build properties.
Here are some properties you may want to change/update:
source.dir
The name of the source directory. Default is 'src'.
out.dir
The name of the output directory. Default is 'bin'.
For other overridable properties, look at the beginning of the rules
files in the SDK, at tools/ant/build.xml
Properties related to the SDK location or the project target should
be updated using the 'android' tool with the 'update' action.
This file is an integral part of the build system for your
application and should be checked into Version Control Systems.
-->
<property file="ant.properties" />
<!-- if sdk.dir was not set from one of the property file, then
get it from the ANDROID_HOME env var.
This must be done before we load project.properties since
the proguard config can use sdk.dir -->
<property environment="env" />
<condition property="sdk.dir" value="${env.ANDROID_HOME}">
<isset property="env.ANDROID_HOME" />
</condition>
<!-- The project.properties file is created and updated by the 'android'
tool, as well as ADT.
This contains project specific properties such as project target, and library
dependencies. Lower level build properties are stored in ant.properties
(or in .classpath for Eclipse projects).
This file is an integral part of the build system for your
application and should be checked into Version Control Systems. -->
<loadproperties srcFile="project.properties" />
<!-- quick check on sdk.dir -->
<fail
message="sdk.dir is missing. Insert sdk.dir=... into './local.properties'. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
unless="sdk.dir"
/>
<fail
message="ndk.dir is missing. Insert ndk.dir=... into './local.properties'."
unless="ndk.dir"
/>
<!--
Import per project custom build rules if present at the root of the project.
This is the place to put custom intermediary targets such as:
-pre-build
-pre-compile
-post-compile (This is typically used for code obfuscation.
Compiled code location: ${out.classes.absolute.dir}
If this is not done in place, override ${out.dex.input.absolute.dir})
-post-package
-post-build
-pre-clean
-->
<import file="custom_rules.xml" optional="true" />
<!-- Import the actual build file.
To customize existing targets, there are two options:
- Customize only one target:
- copy/paste the target into this file, *before* the
<import> task.
- customize it to your needs.
- Customize the whole content of build.xml
- copy/paste the content of the rules files (minus the top node)
into this file, replacing the <import> task.
- customize to your needs.
***********************
****** IMPORTANT ******
***********************
In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
in order to avoid having your file be overridden by tools such as "android update project"
-->
<!-- version-tag: 1 -->
<import file="${sdk.dir}/tools/ant/build.xml" />
</project>

@@ -0,0 +1,20 @@
# To enable ProGuard in your project, edit project.properties
# to define the proguard.config property as described in that file.
#
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in ${sdk.dir}/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the ProGuard
# include property in project.properties.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}

@@ -0,0 +1,14 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system edit
# "ant.properties", and override values to adapt the script to your
# project structure.
#
# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
# Project target.
target=android-28

@@ -0,0 +1,33 @@
# lokinet android
This directory contains the Android build files for lokinet.
## Prerequisites
To build you need the following:
* Gradle (6.x)
* Android SDK (latest version)
* Android NDK (latest version)
## Building
Next, set up the paths to the Android SDK and NDK in `local.properties`:
```
sdk.dir=/path/to/android/sdk
ndk.dir=/path/to/android/ndk
```
Then build:
$ gradle assemble
This fetches a large amount (several dozen gigabytes) of files from some
server somewhere, dumps them on your filesystem to make the thing do the
building, and then pegs all your cores for several dozen minutes while it
performs the required incantations to build two APKs.
The build outputs APKs to subdirectories in `build/outputs/apk/`:
one called `debug` for debug builds and one called `release` for release builds.
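A condensed sketch of those steps (the SDK/NDK paths are placeholders, and the submodule update mirrors what the CI does before invoking gradle):

```
# One-time setup: fetch submodules and point gradle at the SDK/NDK install paths.
git submodule update --init --recursive
cd android
printf 'sdk.dir=/path/to/android/sdk\nndk.dir=/path/to/android/ndk\n' > local.properties
# Build the APKs, then list the outputs.
gradle assemble
ls build/outputs/apk/debug build/outputs/apk/release
```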

Binary file not shown (image, 6.4 KiB).

@@ -0,0 +1,27 @@
<LinearLayout android:id="@+id/main_layout"
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
android:paddingBottom="@dimen/vertical_page_margin"
android:paddingLeft="@dimen/horizontal_page_margin"
android:paddingRight="@dimen/horizontal_page_margin"
android:paddingTop="@dimen/vertical_page_margin"
tools:context=".PermsAskerActivity">
<TextView
android:id="@+id/textview_retry"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginBottom="@dimen/horizontal_page_margin"
android:visibility="gone"
/>
<Button
android:id="@+id/button_request_write_ext_storage_perms"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Retry requesting VPN"
android:visibility="gone"/>
</LinearLayout>

@@ -0,0 +1,27 @@
<LinearLayout android:id="@+id/layout_prompt"
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
android:paddingBottom="@dimen/vertical_page_margin"
android:paddingLeft="@dimen/horizontal_page_margin"
android:paddingRight="@dimen/horizontal_page_margin"
android:paddingTop="@dimen/vertical_page_margin"
tools:context=".PermsAskerActivity">
<TextView
android:id="@+id/textview_explanation"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginBottom="@dimen/horizontal_page_margin"
android:text="VPN permissions are required for lokinet usage."
/>
<Button
android:id="@+id/button_ok"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="OK"
/>
</LinearLayout>

@@ -0,0 +1,16 @@
<menu
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
tools:context=".LokiNetActivity">
<item
android:id="@+id/action_start"
android:title="@string/action_start"
android:orderInCategory="98"
/>
<item
android:id="@+id/action_stop"
android:title="@string/action_stop"
android:orderInCategory="99"
/>
</menu>

@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">lokinet</string>
<string name="action_start">Start</string>
<string name="action_stop">Stop</string>
<string name="already_stopped">Already stopped</string>
<string name="loaded">lokinet loaded</string>
<string name="starting">lokinet is starting</string>
<string name="jniLibraryLoaded">lokinet: loaded JNI libraries</string>
<string name="startedOkay">lokinet started</string>
<string name="startFailed">lokinet start failed</string>
<string name="stopped">lokinet has stopped</string>
<string name="remaining">remaining</string>
<string name="title_activity_perms_asker_prompt">Prompt</string>
<string name="bootstrap_ok">got bootstrap node info</string>
<string name="bootstrap_fail">failed to bootstrap</string>
<string name="netdb_create_fail">failed to create netdb directory</string>
<string name="vpn_setup_fail">failed to set up vpn tunnel</string>
</resources>

@@ -0,0 +1,16 @@
<resources>
<!-- Define standard dimensions to comply with Holo-style grids and rhythm. -->
<dimen name="margin_tiny">4dp</dimen>
<dimen name="margin_small">8dp</dimen>
<dimen name="margin_medium">16dp</dimen>
<dimen name="margin_large">32dp</dimen>
<dimen name="margin_huge">64dp</dimen>
<!-- Semantic definitions -->
<dimen name="horizontal_page_margin">@dimen/margin_medium</dimen>
<dimen name="vertical_page_margin">@dimen/margin_medium</dimen>
</resources>

@@ -0,0 +1 @@
rootProject.name = "lokinet"

@@ -0,0 +1,185 @@
package network.loki.lokinet;
import java.io.File;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.URL;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import android.app.Activity;
import android.content.Context;
import android.content.ComponentName;
import android.content.ServiceConnection;
import android.Manifest;
import android.net.VpnService;
import android.os.AsyncTask;
import android.content.Intent;
import android.os.Bundle;
import android.os.IBinder;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.TextView;
import android.util.Log;
public class LokiNetActivity extends Activity {
private static final String TAG = "lokinet-activity";
private TextView textView;
private static final String DefaultBootstrapURL = "https://seed.lokinet.org/lokinet.signed";
private AsyncBootstrap bootstrapper;
public static final String LOG_TAG = "LokinetDaemon";
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
textView = new TextView(this);
setContentView(textView);
System.loadLibrary("lokinet-android");
}
private static void writeFile(File out, InputStream instream) throws IOException {
OutputStream outstream = new FileOutputStream(out);
byte[] buffer = new byte[512];
int len;
try {
do {
len = instream.read(buffer);
if (len > 0) {
outstream.write(buffer, 0, len);
}
}
while (len != -1);
} finally {
outstream.close();
}
}
public void startLokinet()
{
if(bootstrapper != null)
return;
bootstrapper = new AsyncBootstrap();
bootstrapper.execute(DefaultBootstrapURL);
}
public void runLokinetService()
{
Intent intent = VpnService.prepare(getApplicationContext());
if (intent != null)
{
Log.d(LOG_TAG, "VpnService.prepare() returned an Intent, so launch that intent.");
startActivityForResult(intent, 0);
}
else
{
Log.w(LOG_TAG, "VpnService.prepare() returned null, not running.");
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data)
{
if (resultCode == RESULT_OK)
{
Log.d(LOG_TAG, "VpnService prepared intent RESULT_OK, launching LokinetDaemon Service");
startService(new Intent(LokiNetActivity.this,
LokinetDaemon.class));
}
else
{
Log.d(LOG_TAG, "VpnService prepared intent NOT RESULT_OK, shit.");
}
}
@Override
protected void onDestroy() {
super.onDestroy();
textView = null;
}
public File getRootDir()
{
return getFilesDir();
}
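// Downloads the signed bootstrap file on a background thread and, if the download
// succeeds, starts the VPN service from onPostExecute(). (Note: AsyncTask is
// deprecated on newer Android API levels.)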
private class AsyncBootstrap extends AsyncTask<String, String, String>
{
public String doInBackground(String ... urls) {
try
{
File bootstrapFile = new File(getRootDir(), "bootstrap.signed");
URL bootstrapURL = new URL(urls[0]);
InputStream instream = bootstrapURL.openStream();
writeFile(bootstrapFile, instream);
instream.close();
return getString(R.string.bootstrap_ok);
}
catch(Exception thrown)
{
return getString(R.string.bootstrap_fail) + ": " + throwableToString(thrown);
}
}
public void onPostExecute(String val) {
textView.setText(val);
if(val.equals(getString(R.string.bootstrap_ok)))
runLokinetService();
bootstrapDone();
}
}
private void bootstrapDone()
{
bootstrapper = null;
}
private CharSequence throwableToString(Throwable tr) {
StringWriter sw = new StringWriter(8192);
PrintWriter pw = new PrintWriter(sw);
tr.printStackTrace(pw);
pw.close();
return sw.toString();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.options_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
switch(id){
case R.id.action_start:
startLokinet();
return true;
case R.id.action_stop:
return true;
}
return super.onOptionsItemSelected(item);
}
}

@ -26,9 +26,6 @@ public class LokinetDaemon extends VpnService
private static native String DetectFreeRange();
public native String DumpStatus();
public static final String LOG_TAG = "LokinetDaemon";
ByteBuffer impl = null;

@ -0,0 +1,28 @@
package network.loki.lokinet;
import android.util.Log;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
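// Receives connectivity-change broadcasts. Currently it only logs the event and
// computes the connected state; nothing is done with the result yet.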
public class NetworkStateChangeReceiver extends BroadcastReceiver {
private static final String TAG = "lokinet";
//api level 1
@Override
public void onReceive(final Context context, final Intent intent) {
Log.d(TAG,"Network state change");
try {
ConnectivityManager cm = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo activeNetworkInfo = cm.getActiveNetworkInfo();
boolean isConnected = activeNetworkInfo!=null && activeNetworkInfo.isConnected();
// https://developer.android.com/training/monitoring-device-state/connectivity-monitoring.html?hl=ru
// boolean isWiFi = activeNetworkInfo!=null && (activeNetworkInfo.getType() == ConnectivityManager.TYPE_WIFI);
} catch (Throwable tr) {
Log.d(TAG,"",tr);
}
}
}

@ -0,0 +1,173 @@
package network.loki.lokinet;
import android.Manifest;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import java.lang.reflect.Method;
//dangerous perms, per https://developer.android.com/guide/topics/permissions/normal-permissions.html :
//android.permission.WRITE_EXTERNAL_STORAGE
public class PermsAskerActivity extends Activity {
private static final int PERMISSION_VPN = 0;
private Button button_request_write_ext_storage_perms;
private TextView textview_retry;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
startMainActivity();
/*
//if less than Android 6, no runtime perms req system present
if (android.os.Build.VERSION.SDK_INT < 23) {
return;
}
setContentView(R.layout.activity_perms_asker);
button_request_write_ext_storage_perms = (Button) findViewById(R.id.button_request_write_ext_storage_perms);
textview_retry = (TextView) findViewById(R.id.textview_retry);
button_request_write_ext_storage_perms.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
request_write_ext_storage_perms();
}
});
request_write_ext_storage_perms();
*/
}
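// The permission calls below go through reflection so this class still loads on
// pre-Marshmallow (API < 23) devices, where checkSelfPermission()/requestPermissions()
// do not exist on Activity.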
private void request_write_ext_storage_perms() {
textview_retry.setVisibility(TextView.GONE);
button_request_write_ext_storage_perms.setVisibility(Button.GONE);
Method methodCheckPermission;
Method method_shouldShowRequestPermissionRationale;
Method method_requestPermissions;
try {
methodCheckPermission = getClass().getMethod("checkSelfPermission", String.class);
method_shouldShowRequestPermissionRationale =
getClass().getMethod("shouldShowRequestPermissionRationale", String.class);
method_requestPermissions =
getClass().getMethod("requestPermissions", String[].class, int.class);
} catch (NoSuchMethodException e) {
throw new RuntimeException(e);
}
Integer resultObj;
try {
resultObj = (Integer) methodCheckPermission.invoke(
this, Manifest.permission.BIND_VPN_SERVICE);
} catch (Throwable e) {
throw new RuntimeException(e);
}
if (resultObj != PackageManager.PERMISSION_GRANTED) {
// Should we show an explanation?
Boolean aBoolean;
try {
aBoolean = (Boolean) method_shouldShowRequestPermissionRationale.invoke(this,
Manifest.permission.BIND_VPN_SERVICE);
} catch (Exception e) {
throw new RuntimeException(e);
}
if (aBoolean) {
// Show an explanation to the user *asynchronously* -- don't block
// this thread waiting for the user's response! After the user
// sees the explanation, try again to request the permission.
showExplanation();
} else {
// No explanation needed, we can request the permission.
try {
method_requestPermissions.invoke(this,
new String[]{Manifest.permission.BIND_VPN_SERVICE},
PERMISSION_VPN);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
} else startMainActivity();
}
@Override
public void onRequestPermissionsResult(int requestCode,
String permissions[], int[] grantResults) {
switch (requestCode) {
case PERMISSION_VPN: {
// If request is cancelled, the result arrays are empty.
if (grantResults.length > 0
&& grantResults[0] == PackageManager.PERMISSION_GRANTED) {
// permission was granted, yay! Do the
// contacts-related task you need to do.
startMainActivity();
} else {
// permission denied, boo! Disable the
// functionality that depends on this permission.
textview_retry.setText("you need to allow this to continue");
textview_retry.setVisibility(TextView.VISIBLE);
button_request_write_ext_storage_perms.setVisibility(Button.VISIBLE);
}
return;
}
// other 'case' lines to check for other
// permissions this app might request.
}
}
private void startMainActivity() {
startActivity(new Intent(this, LokiNetActivity.class));
finish();
}
private static final int SHOW_EXPLANATION_REQUEST = 1; // The request code
private void showExplanation() {
Intent intent = new Intent(this, PermsExplanationActivity.class);
startActivityForResult(intent, SHOW_EXPLANATION_REQUEST);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// Check which request we're responding to
if (requestCode == SHOW_EXPLANATION_REQUEST) {
// Make sure the request was successful
if (resultCode == RESULT_OK) {
// Request the permission
Method method_requestPermissions;
try {
method_requestPermissions =
getClass().getMethod("requestPermissions", String[].class, int.class);
} catch (NoSuchMethodException e) {
throw new RuntimeException(e);
}
try {
method_requestPermissions.invoke(this,
new String[]{Manifest.permission.BIND_VPN_SERVICE},
PERMISSION_VPN);
} catch (Exception e) {
throw new RuntimeException(e);
}
} else {
finish(); //close the app
}
}
}
}

@ -0,0 +1,38 @@
package network.loki.lokinet;
import android.app.ActionBar;
import android.content.Intent;
import android.os.Bundle;
import android.app.Activity;
import android.view.View;
import android.widget.Button;
public class PermsExplanationActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_perms_explanation);
ActionBar actionBar = getActionBar();
if(actionBar!=null)actionBar.setHomeButtonEnabled(false);
Button button_ok = (Button) findViewById(R.id.button_ok);
button_ok.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
returnFromActivity();
}
});
}
private void returnFromActivity() {
Intent data = new Intent();
Activity parent = getParent();
if (parent == null) {
setResult(Activity.RESULT_OK, data);
} else {
parent.setResult(Activity.RESULT_OK, data);
}
finish();
}
}

@ -57,4 +57,4 @@ else()
endif()
endif()
configure_file("${SRC}" "${DEST}" @ONLY)
configure_file("${SRC}" "${DEST}")

@ -5,32 +5,32 @@
set(LOCAL_MIRROR "" CACHE STRING "local mirror path/URL for lib downloads")
set(OPENSSL_VERSION 3.0.7 CACHE STRING "openssl version")
set(OPENSSL_VERSION 1.1.1k CACHE STRING "openssl version")
set(OPENSSL_MIRROR ${LOCAL_MIRROR} https://www.openssl.org/source CACHE STRING "openssl download mirror(s)")
set(OPENSSL_SOURCE openssl-${OPENSSL_VERSION}.tar.gz)
set(OPENSSL_HASH SHA256=83049d042a260e696f62406ac5c08bf706fd84383f945cf21bd61e9ed95c396e
set(OPENSSL_HASH SHA256=892a0875b9872acd04a9fde79b1f943075d5ea162415de3047c327df33fbaee5
CACHE STRING "openssl source hash")
set(EXPAT_VERSION 2.5.0 CACHE STRING "expat version")
set(EXPAT_VERSION 2.3.0 CACHE STRING "expat version")
string(REPLACE "." "_" EXPAT_TAG "R_${EXPAT_VERSION}")
set(EXPAT_MIRROR ${LOCAL_MIRROR} https://github.com/libexpat/libexpat/releases/download/${EXPAT_TAG}
CACHE STRING "expat download mirror(s)")
set(EXPAT_SOURCE expat-${EXPAT_VERSION}.tar.xz)
set(EXPAT_HASH SHA512=2da73b991b7c0c54440485c787e5edeb3567230204e31b3cac1c3a6713ec6f9f1554d3afffc0f8336168dfd5df02db4a69bcf21b4d959723d14162d13ab87516
set(EXPAT_HASH SHA512=dde8a9a094b18d795a0e86ca4aa68488b352dc67019e0d669e8b910ed149628de4c2a49bc3a5b832f624319336a01f9e4debe03433a43e1c420f36356d886820
CACHE STRING "expat source hash")
set(UNBOUND_VERSION 1.17.0 CACHE STRING "unbound version")
set(UNBOUND_VERSION 1.13.1 CACHE STRING "unbound version")
set(UNBOUND_MIRROR ${LOCAL_MIRROR} https://nlnetlabs.nl/downloads/unbound CACHE STRING "unbound download mirror(s)")
set(UNBOUND_SOURCE unbound-${UNBOUND_VERSION}.tar.gz)
set(UNBOUND_HASH SHA512=f6b9f279330fb19b5feca09524959940aad8c4e064528aa82b369c726d77e9e8e5ca23f366f6e9edcf2c061b96f482ed7a2c26ac70fc15ae5762b3d7e36a5284
set(UNBOUND_HASH SHA256=8504d97b8fc5bd897345c95d116e0ee0ddf8c8ff99590ab2b4bd13278c9f50b8
CACHE STRING "unbound source hash")
set(SQLITE3_VERSION 3400000 CACHE STRING "sqlite3 version")
set(SQLITE3_MIRROR ${LOCAL_MIRROR} https://www.sqlite.org/2022
set(SQLITE3_VERSION 3350500 CACHE STRING "sqlite3 version")
set(SQLITE3_MIRROR ${LOCAL_MIRROR} https://www.sqlite.org/2021
CACHE STRING "sqlite3 download mirror(s)")
set(SQLITE3_SOURCE sqlite-autoconf-${SQLITE3_VERSION}.tar.gz)
set(SQLITE3_HASH SHA3_256=7ee8f02b21edb4489df5082b5cf5b7ef47bcebcdb0e209bf14240db69633c878
CACHE STRING "sqlite3 source hash")
set(SQLITE3_HASH SHA512=039af796f79fc4517be0bd5ba37886264d49da309e234ae6fccdb488ef0109ed2b917fc3e6c1fc7224dff4f736824c653aaf8f0a37550c5ebc14d035cb8ac737
CACHE STRING "sqlite3 source hash")
set(SODIUM_VERSION 1.0.18 CACHE STRING "libsodium version")
set(SODIUM_MIRROR ${LOCAL_MIRROR}
@ -48,27 +48,29 @@ set(ZMQ_SOURCE zeromq-${ZMQ_VERSION}.tar.gz)
set(ZMQ_HASH SHA512=e198ef9f82d392754caadd547537666d4fba0afd7d027749b3adae450516bcf284d241d4616cad3cb4ad9af8c10373d456de92dc6d115b037941659f141e7c0e
CACHE STRING "libzmq source hash")
set(LIBUV_VERSION 1.44.2 CACHE STRING "libuv version")
set(LIBUV_VERSION 1.41.0 CACHE STRING "libuv version")
set(LIBUV_MIRROR ${LOCAL_MIRROR} https://dist.libuv.org/dist/v${LIBUV_VERSION}
CACHE STRING "libuv mirror(s)")
set(LIBUV_SOURCE libuv-v${LIBUV_VERSION}.tar.gz)
set(LIBUV_HASH SHA512=91197ff9303112567bbb915bbb88058050e2ad1c048815a3b57c054635d5dc7df458b956089d785475290132236cb0edcfae830f5d749de29a9a3213eeaf0b20
set(LIBUV_HASH SHA512=33613fa28e8136507300eba374351774849b6b39aab4e53c997a918d3bc1d1094c6123e0e509535095b14dc5daa885eadb1a67bed46622ad3cc79d62dc817e84
CACHE STRING "libuv source hash")
set(ZLIB_VERSION 1.2.13 CACHE STRING "zlib version")
set(ZLIB_VERSION 1.2.11 CACHE STRING "zlib version")
set(ZLIB_MIRROR ${LOCAL_MIRROR} https://zlib.net
CACHE STRING "zlib mirror(s)")
set(ZLIB_SOURCE zlib-${ZLIB_VERSION}.tar.xz)
set(ZLIB_HASH SHA256=d14c38e313afc35a9a8760dadf26042f51ea0f5d154b0630a31da0540107fb98
CACHE STRING "zlib source hash")
set(CURL_VERSION 7.86.0 CACHE STRING "curl version")
set(ZLIB_SOURCE zlib-${ZLIB_VERSION}.tar.gz)
set(ZLIB_HASH SHA512=73fd3fff4adeccd4894084c15ddac89890cd10ef105dd5e1835e1e9bbb6a49ff229713bd197d203edfa17c2727700fce65a2a235f07568212d820dca88b528ae
CACHE STRING "zlib source hash")
set(CURL_VERSION 7.76.1 CACHE STRING "curl version")
set(CURL_MIRROR ${LOCAL_MIRROR} https://curl.haxx.se/download https://curl.askapache.com
CACHE STRING "curl mirror(s)")
set(CURL_SOURCE curl-${CURL_VERSION}.tar.xz)
set(CURL_HASH SHA512=18e03a3c00f22125e07bddb18becbf5acdca22baeb7b29f45ef189a5c56f95b2d51247813f7a9a90f04eb051739e9aa7d3a1c5be397bae75d763a2b918d1b656
set(CURL_HASH SHA256=64bb5288c39f0840c07d077e30d9052e1cbb9fa6c2dc52523824cc859e679145
CACHE STRING "curl source hash")
include(ExternalProject)
set(DEPS_DESTDIR ${CMAKE_BINARY_DIR}/static-deps)
@ -125,21 +127,21 @@ if(ANDROID)
set(android_toolchain_prefix x86_64)
set(android_toolchain_suffix linux-android)
elseif(CMAKE_ANDROID_ARCH_ABI MATCHES x86)
set(android_machine x86)
set(android_machine i686)
set(cross_host "--host=i686-linux-android")
set(android_compiler_prefix i686)
set(android_compiler_suffix linux-android23)
set(android_toolchain_prefix i686)
set(android_toolchain_suffix linux-android)
elseif(CMAKE_ANDROID_ARCH_ABI MATCHES armeabi-v7a)
set(android_machine arm)
set(android_machine armv7)
set(cross_host "--host=armv7a-linux-androideabi")
set(android_compiler_prefix armv7a)
set(android_compiler_suffix linux-androideabi23)
set(android_toolchain_prefix arm)
set(android_toolchain_suffix linux-androideabi)
elseif(CMAKE_ANDROID_ARCH_ABI MATCHES arm64-v8a)
set(android_machine arm64)
set(android_machine aarch64)
set(cross_host "--host=aarch64-linux-android")
set(android_compiler_prefix aarch64)
set(android_compiler_suffix linux-android23)
@ -167,18 +169,6 @@ if(APPLE)
set(deps_CXXFLAGS "${deps_CXXFLAGS} -mmacosx-version-min=${CMAKE_OSX_DEPLOYMENT_TARGET}")
endif()
if(_winver)
set(deps_CFLAGS "${deps_CFLAGS} -D_WIN32_WINNT=${_winver}")
set(deps_CXXFLAGS "${deps_CXXFLAGS} -D_WIN32_WINNT=${_winver}")
endif()
if("${CMAKE_GENERATOR}" STREQUAL "Unix Makefiles")
set(_make $(MAKE))
else()
set(_make make)
endif()
# Builds a target; takes the target name (e.g. "readline") and builds it in an external project with
# target name suffixed with `_external`. Its upper-case value is used to get the download details
@ -188,8 +178,8 @@ set(build_def_DEPENDS "")
set(build_def_PATCH_COMMAND "")
set(build_def_CONFIGURE_COMMAND ./configure ${cross_host} --disable-shared --prefix=${DEPS_DESTDIR} --with-pic
"CC=${deps_cc}" "CXX=${deps_cxx}" "CFLAGS=${deps_CFLAGS}" "CXXFLAGS=${deps_CXXFLAGS}" ${cross_rc})
set(build_def_BUILD_COMMAND ${_make})
set(build_def_INSTALL_COMMAND ${_make} install)
set(build_def_BUILD_COMMAND make)
set(build_def_INSTALL_COMMAND make install)
set(build_def_BUILD_BYPRODUCTS ${DEPS_DESTDIR}/lib/lib___TARGET___.a ${DEPS_DESTDIR}/include/___TARGET___.h)
function(build_external target)
@ -228,7 +218,7 @@ build_external(libuv
add_static_target(libuv libuv_external libuv.a)
target_link_libraries(libuv INTERFACE ${CMAKE_DL_LIBS})
build_external(zlib
CONFIGURE_COMMAND ${CMAKE_COMMAND} -E env "CC=${deps_cc}" "CFLAGS=${deps_CFLAGS} -fPIC" ${cross_extra} ./configure --prefix=${DEPS_DESTDIR} --static
BUILD_BYPRODUCTS
@ -239,56 +229,25 @@ add_static_target(zlib zlib_external libz.a)
set(openssl_system_env "")
set(openssl_arch "")
set(openssl_configure_command ./config)
set(openssl_flags "CFLAGS=${deps_CFLAGS}")
set(unbound_ldflags "")
if(CMAKE_CROSSCOMPILING)
if(ARCH_TRIPLET STREQUAL x86_64-w64-mingw32)
set(openssl_arch mingw64)
set(openssl_system_env RC=${CMAKE_RC_COMPILER} AR=${ARCH_TRIPLET}-ar RANLIB=${ARCH_TRIPLET}-ranlib)
set(openssl_system_env SYSTEM=MINGW64 RC=${CMAKE_RC_COMPILER} AR=${ARCH_TRIPLET}-ar RANLIB=${ARCH_TRIPLET}-ranlib)
elseif(ARCH_TRIPLET STREQUAL i686-w64-mingw32)
set(openssl_arch mingw)
set(openssl_system_env RC=${CMAKE_RC_COMPILER} AR=${ARCH_TRIPLET}-ar RANLIB=${ARCH_TRIPLET}-ranlib)
set(openssl_system_env SYSTEM=MINGW32 RC=${CMAKE_RC_COMPILER} AR=${ARCH_TRIPLET}-ar RANLIB=${ARCH_TRIPLET}-ranlib)
elseif(ANDROID)
set(openssl_arch android-${android_machine})
set(openssl_system_env LD=${deps_ld} RANLIB=${deps_ranlib} AR=${deps_ar} ANDROID_NDK_ROOT=${CMAKE_ANDROID_NDK} "PATH=${CMAKE_ANDROID_NDK}/toolchains/llvm/prebuilt/linux-x86_64/bin:$ENV{PATH}")
list(APPEND openssl_flags "CPPFLAGS=-D__ANDROID_API__=${ANDROID_API}")
set(openssl_system_env SYSTEM=Linux MACHINE=${android_machine} LD=${deps_ld} RANLIB=${deps_ranlib} AR=${deps_ar})
set(openssl_extra_opts no-asm)
elseif(ARCH_TRIPLET STREQUAL mips64-linux-gnuabi64)
set(openssl_arch linux-mips64)
elseif(ARCH_TRIPLET STREQUAL mips-linux-gnu)
set(openssl_arch linux-mips32)
elseif(ARCH_TRIPLET STREQUAL mips-openwrt-linux)
set(unbound_ldflags "-latomic")
set(openssl_arch linux-mips32)
elseif(ARCH_TRIPLET STREQUAL mipsel-linux-gnu)
set(openssl_arch linux-mips)
elseif(ARCH_TRIPLET STREQUAL aarch64-linux-gnu OR ARCH_TRIPLET STREQUAL aarch64-openwrt-linux-musl)
# cross compile arm64
set(openssl_arch linux-aarch64)
elseif(ARCH_TRIPLET MATCHES arm-linux)
# cross compile armhf
set(openssl_arch linux-armv4)
elseif(ARCH_TRIPLET MATCHES powerpc64le)
# cross compile ppc64le
set(openssl_arch linux-ppc64le)
endif()
elseif(CMAKE_C_FLAGS MATCHES "-march=armv7")
# Help openssl figure out that we're building from armv7 even if on armv8 hardware:
set(openssl_arch linux-armv4)
set(openssl_system_env SYSTEM=Linux MACHINE=armv7)
endif()
build_external(openssl
CONFIGURE_COMMAND ${CMAKE_COMMAND} -E env CC=${deps_cc} ${openssl_system_env} ${openssl_configure_command}
--prefix=${DEPS_DESTDIR} --libdir=lib ${openssl_extra_opts}
no-shared no-capieng no-dso no-dtls1 no-ec_nistp_64_gcc_128 no-gost
no-md2 no-rc5 no-rdrand no-rfc3779 no-sctp no-ssl-trace no-ssl3
no-static-engine no-tests no-weak-ssl-ciphers no-zlib no-zlib-dynamic ${openssl_flags}
${openssl_arch}
BUILD_COMMAND ${CMAKE_COMMAND} -E env ${openssl_system_env} ${_make}
INSTALL_COMMAND ${_make} install_sw
CONFIGURE_COMMAND ${CMAKE_COMMAND} -E env CC=${deps_cc} ${openssl_system_env} ./config
--prefix=${DEPS_DESTDIR} ${openssl_extra_opts} no-shared no-capieng no-dso no-dtls1 no-ec_nistp_64_gcc_128 no-gost
no-heartbeats no-md2 no-rc5 no-rdrand no-rfc3779 no-sctp no-ssl-trace no-ssl2 no-ssl3
no-static-engine no-tests no-weak-ssl-ciphers no-zlib no-zlib-dynamic "CFLAGS=${deps_CFLAGS}"
INSTALL_COMMAND make install_sw
BUILD_BYPRODUCTS
${DEPS_DESTDIR}/lib/libssl.a ${DEPS_DESTDIR}/lib/libcrypto.a
${DEPS_DESTDIR}/include/openssl/ssl.h ${DEPS_DESTDIR}/include/openssl/crypto.h
@ -301,6 +260,7 @@ endif()
set(OPENSSL_INCLUDE_DIR ${DEPS_DESTDIR}/include)
set(OPENSSL_CRYPTO_LIBRARY ${DEPS_DESTDIR}/lib/libcrypto.a ${DEPS_DESTDIR}/lib/libssl.a)
set(OPENSSL_VERSION 1.1.1)
set(OPENSSL_ROOT_DIR ${DEPS_DESTDIR})
build_external(expat
@ -311,19 +271,13 @@ build_external(expat
add_static_target(expat expat_external libexpat.a)
if(WIN32)
set(unbound_patch
PATCH_COMMAND ${PROJECT_SOURCE_DIR}/contrib/apply-patches.sh
${PROJECT_SOURCE_DIR}/contrib/patches/unbound-delete-crash-fix.patch)
endif()
build_external(unbound
DEPENDS openssl_external expat_external
${unbound_patch}
CONFIGURE_COMMAND ./configure ${cross_host} ${cross_rc} --prefix=${DEPS_DESTDIR} --disable-shared
--enable-static --with-libunbound-only --with-pic
--$<IF:$<BOOL:${WITH_LTO}>,enable,disable>-flto --with-ssl=${DEPS_DESTDIR}
--with-libexpat=${DEPS_DESTDIR}
"CC=${deps_cc}" "CFLAGS=${deps_CFLAGS}" "LDFLAGS=${unbound_ldflags}"
"CC=${deps_cc}" "CFLAGS=${deps_CFLAGS}"
)
add_static_target(libunbound unbound_external libunbound.a)
if(NOT WIN32)
@ -338,11 +292,8 @@ build_external(sodium CONFIGURE_COMMAND ./configure ${cross_host} ${cross_rc} --
--enable-static --with-pic "CC=${deps_cc}" "CFLAGS=${deps_CFLAGS}")
add_static_target(sodium sodium_external libsodium.a)
if(WITH_PEERSTATS_BACKEND)
build_external(sqlite3)
add_static_target(sqlite3 sqlite3_external libsqlite3.a)
endif()
build_external(sqlite3)
add_static_target(sqlite3 sqlite3_external libsqlite3.a)
if(ARCH_TRIPLET MATCHES mingw)
@ -354,9 +305,7 @@ endif()
if(CMAKE_CROSSCOMPILING AND ARCH_TRIPLET MATCHES mingw)
set(zmq_patch
PATCH_COMMAND ${PROJECT_SOURCE_DIR}/contrib/apply-patches.sh
${PROJECT_SOURCE_DIR}/contrib/patches/libzmq-mingw-wepoll.patch
${PROJECT_SOURCE_DIR}/contrib/patches/libzmq-mingw-unistd.patch)
PATCH_COMMAND ${PROJECT_SOURCE_DIR}/contrib/apply-patches.sh ${PROJECT_SOURCE_DIR}/contrib/patches/libzmq-mingw-wepoll.patch ${PROJECT_SOURCE_DIR}/contrib/patches/libzmq-mingw-closesocket.patch)
endif()
build_external(zmq
@ -379,19 +328,6 @@ set_target_properties(libzmq PROPERTIES
INTERFACE_LINK_LIBRARIES "${libzmq_link_libs}"
INTERFACE_COMPILE_DEFINITIONS "ZMQ_STATIC")
#
#
#
# Everything that follows is *only* for lokinet-bootstrap (i.e. if adding new deps put them *above*
# this).
#
#
#
if(NOT WITH_BOOTSTRAP)
return()
endif()
set(curl_extra)
if(WIN32)
set(curl_ssl_opts --without-ssl --with-schannel)
@ -443,12 +379,11 @@ foreach(curl_arch ${curl_arches})
--enable-crypto-auth --disable-ntlm-wb --disable-tls-srp --disable-unix-sockets --disable-cookies
--enable-http-auth --enable-doh --disable-mime --enable-dateparse --disable-netrc --without-libidn2
--disable-progress-meter --without-brotli --with-zlib=${DEPS_DESTDIR} ${curl_ssl_opts}
--without-librtmp --disable-versioned-symbols --enable-hidden-symbols
--without-libmetalink --without-librtmp --disable-versioned-symbols --enable-hidden-symbols
--without-zsh-functions-dir --without-fish-functions-dir
--without-nghttp3 --without-zstd
"CC=${deps_cc}" "CFLAGS=${deps_noarch_CFLAGS}${cflags_extra}" ${curl_extra}
BUILD_COMMAND true
INSTALL_COMMAND ${_make} -C lib install && ${_make} -C include install
INSTALL_COMMAND make -C lib install && make -C include install
BUILD_BYPRODUCTS
${curl_prefix}/lib/libcurl.a
${curl_prefix}/include/curl/curl.h
@ -457,7 +392,7 @@ foreach(curl_arch ${curl_arches})
list(APPEND curl_lib_outputs ${curl_prefix}/lib/libcurl.a)
endforeach()
message(STATUS "TARGETS: ${curl_lib_targets}")
if(IOS AND num_arches GREATER 1)
# We are building multiple architectures for different iOS devices, so we need to glue the

@ -1,46 +1,22 @@
# We do this via a custom command that re-invokes a cmake script because we need the
# DEPENDS on .git/index so that we will re-run it (to regenerate the commit tag in the
# version) whenever the current commit changes.  If we used a configure_file directly
# here, it would only re-run when something else causes cmake to re-run.
if(LOKINET_VERSIONTAG)
set(VERSIONTAG "${LOKINET_VERSIONTAG}")
configure_file("${CMAKE_CURRENT_SOURCE_DIR}/constants/version.cpp.in" "${CMAKE_CURRENT_BINARY_DIR}/constants/version.cpp" @ONLY)
else()
set(VERSIONTAG "${GIT_VERSION}")
set(GIT_INDEX_FILE "${PROJECT_SOURCE_DIR}/.git/index")
find_package(Git)
if(EXISTS "${GIT_INDEX_FILE}" AND ( GIT_FOUND OR Git_FOUND) )
message(STATUS "Found Git: ${GIT_EXECUTABLE}")
set(genversion_args "-DGIT=${GIT_EXECUTABLE}")
foreach(v lokinet_VERSION lokinet_VERSION_MAJOR lokinet_VERSION_MINOR lokinet_VERSION_PATCH RELEASE_MOTTO)
list(APPEND genversion_args "-D${v}=${${v}}")
endforeach()
add_custom_command(
OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/constants/version.cpp"
COMMAND "${CMAKE_COMMAND}"
${genversion_args}
"-D" "SRC=${CMAKE_CURRENT_SOURCE_DIR}/constants/version.cpp.in"
"-D" "DEST=${CMAKE_CURRENT_BINARY_DIR}/constants/version.cpp"
"-P" "${CMAKE_CURRENT_LIST_DIR}/GenVersion.cmake"
DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/constants/version.cpp.in"
"${GIT_INDEX_FILE}")
else()
configure_file("${CMAKE_CURRENT_SOURCE_DIR}/constants/version.cpp.in" "${CMAKE_CURRENT_BINARY_DIR}/constants/version.cpp" @ONLY)
endif()
endif()
find_package(Git QUIET)
set(GIT_INDEX_FILE "${PROJECT_SOURCE_DIR}/.git/index")
if(EXISTS ${GIT_INDEX_FILE} AND ( GIT_FOUND OR Git_FOUND) )
message(STATUS "Found Git: ${GIT_EXECUTABLE}")
if(WIN32)
foreach(exe IN ITEMS lokinet lokinet-vpn lokinet-bootstrap)
set(lokinet_EXE_NAME "${exe}.exe")
configure_file("${CMAKE_CURRENT_SOURCE_DIR}/win32/version.rc.in" "${CMAKE_BINARY_DIR}/${exe}.rc" @ONLY)
set_property(SOURCE "${CMAKE_BINARY_DIR}/${exe}.rc" PROPERTY GENERATED 1)
endforeach()
endif()
add_custom_target(genversion_cpp DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/constants/version.cpp")
if(WIN32)
add_custom_target(genversion_rc DEPENDS "${CMAKE_BINARY_DIR}/lokinet.rc" "${CMAKE_BINARY_DIR}/lokinet-vpn.rc" "${CMAKE_BINARY_DIR}/lokinet-bootstrap.rc")
add_custom_command(
OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/constants/version.cpp"
COMMAND "${CMAKE_COMMAND}"
"-D" "GIT=${GIT_EXECUTABLE}"
"-D" "SRC=${CMAKE_CURRENT_SOURCE_DIR}/constants/version.cpp.in"
"-D" "DEST=${CMAKE_CURRENT_BINARY_DIR}/constants/version.cpp"
"-P" "${CMAKE_CURRENT_LIST_DIR}/GenVersion.cmake"
DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/constants/version.cpp.in"
"${GIT_INDEX_FILE}")
else()
add_custom_target(genversion_rc)
message(STATUS "Git was not found! Setting version to to nogit")
set(VERSIONTAG "nogit")
configure_file("${CMAKE_CURRENT_SOURCE_DIR}/constants/version.cpp.in" "${CMAKE_CURRENT_BINARY_DIR}/constants/version.cpp")
endif()
add_custom_target(genversion DEPENDS genversion_cpp genversion_rc)
add_custom_target(genversion DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/constants/version.cpp")

@ -0,0 +1,8 @@
function(add_log_tag target)
get_target_property(TARGET_SRCS ${target} SOURCES)
foreach(F ${TARGET_SRCS})
get_filename_component(fpath "${F}" ABSOLUTE)
string(REPLACE "${PROJECT_SOURCE_DIR}/" "" logtag "${fpath}")
set_property(SOURCE ${F} APPEND PROPERTY COMPILE_DEFINITIONS LOG_TAG=\"${logtag}\")
endforeach()
endfunction()
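# Example (hypothetical target name): add_log_tag(lokinet) gives every source file of
# the target a LOG_TAG definition such as "llarp/router/router.cpp", i.e. its path
# relative to the project root.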

@ -44,8 +44,7 @@ if(filesystem_is_good EQUAL 1)
else()
# Probably broken AF macos
message(STATUS "std::filesystem is not available, apparently this compiler isn't C++17 compliant; falling back to ghc::filesystem")
set(GHC_FILESYSTEM_WITH_INSTALL OFF CACHE INTERNAL "")
add_subdirectory(external/ghc-filesystem)
target_link_libraries(filesystem INTERFACE ghc_filesystem)
target_compile_definitions(filesystem INTERFACE USE_GHC_FILESYSTEM CLI11_HAS_FILESYSTEM=0)
target_compile_definitions(filesystem INTERFACE USE_GHC_FILESYSTEM)
endif()

@ -2,9 +2,6 @@
include(CheckIPOSupported)
option(WITH_LTO "enable link-time optimization (LTO) at compile time" ON)
if(WITH_LTO)
if(WIN32)
message(FATAL_ERROR "LTO not supported on win32 targets, please set -DWITH_LTO=OFF")
endif()
check_ipo_supported(RESULT IPO_ENABLED OUTPUT ipo_error)
if(IPO_ENABLED)
message(STATUS "LTO enabled")

@ -1,17 +0,0 @@
set(default_build_gui OFF)
if(APPLE OR WIN32)
set(default_build_gui ON)
endif()
if(WIN32)
set(GUI_EXE "" CACHE FILEPATH "path to a pre-built Windows GUI .exe to use (implies -DBUILD_GUI=OFF)")
if(GUI_EXE)
set(default_build_gui OFF)
endif()
endif()
option(BUILD_GUI "build electron gui from 'gui' submodule source" ${default_build_gui})
if(BUILD_GUI AND GUI_EXE)
message(FATAL_ERROR "-DGUI_EXE=... and -DBUILD_GUI=ON are mutually exclusive")
endif()

@ -1,67 +0,0 @@
if(WIN32 AND GUI_EXE)
message(STATUS "using pre-built lokinet gui executable: ${GUI_EXE}")
execute_process(COMMAND ${CMAKE_COMMAND} -E copy_if_different "${GUI_EXE}" "${PROJECT_BINARY_DIR}/gui/lokinet-gui.exe")
elseif(BUILD_GUI)
message(STATUS "Building lokinet-gui from source")
set(default_gui_target pack)
if(APPLE)
set(default_gui_target macos:raw)
elseif(WIN32)
set(default_gui_target win32)
endif()
set(GUI_YARN_TARGET "${default_gui_target}" CACHE STRING "yarn target for building the GUI")
set(GUI_YARN_EXTRA_OPTS "" CACHE STRING "extra options to pass into the yarn build command")
# allow manually specifying yarn with -DYARN=
if(NOT YARN)
find_program(YARN NAMES yarnpkg yarn REQUIRED)
endif()
message(STATUS "Building lokinet-gui with yarn ${YARN}, target ${GUI_YARN_TARGET}")
if(NOT WIN32)
add_custom_target(lokinet-gui
COMMAND ${YARN} install --frozen-lockfile &&
${YARN} ${GUI_YARN_EXTRA_OPTS} ${GUI_YARN_TARGET}
WORKING_DIRECTORY "${PROJECT_SOURCE_DIR}/gui")
endif()
if(APPLE)
add_custom_target(assemble_gui ALL
DEPENDS assemble lokinet-gui
COMMAND mkdir "${lokinet_app}/Contents/Helpers"
COMMAND cp -a "${PROJECT_SOURCE_DIR}/gui/release/mac/Lokinet-GUI.app" "${lokinet_app}/Contents/Helpers/"
COMMAND mkdir -p "${lokinet_app}/Contents/Resources/en.lproj"
COMMAND cp "${PROJECT_SOURCE_DIR}/contrib/macos/InfoPlist.strings" "${lokinet_app}/Contents/Resources/en.lproj/"
COMMAND cp "${lokinet_app}/Contents/Resources/icon.icns" "${lokinet_app}/Contents/Helpers/Lokinet-GUI.app/Contents/Resources/icon.icns"
COMMAND cp "${PROJECT_SOURCE_DIR}/contrib/macos/InfoPlist.strings" "${lokinet_app}/Contents/Helpers/Lokinet-GUI.app/Contents/Resources/en.lproj/"
COMMAND /usr/libexec/PlistBuddy
-c "Delete :CFBundleDisplayName"
-c "Add :LSHasLocalizedDisplayName bool true"
-c "Add :CFBundleDevelopmentRegion string en"
-c "Set :CFBundleShortVersionString ${lokinet_VERSION}"
-c "Set :CFBundleVersion ${lokinet_VERSION}.${LOKINET_APPLE_BUILD}"
"${lokinet_app}/Contents/Helpers/Lokinet-GUI.app/Contents/Info.plist"
)
elseif(WIN32)
file(MAKE_DIRECTORY "${PROJECT_BINARY_DIR}/gui")
add_custom_command(OUTPUT "${PROJECT_BINARY_DIR}/gui/lokinet-gui.exe"
COMMAND ${YARN} install --frozen-lockfile &&
USE_SYSTEM_7ZA=true DISPLAY= WINEDEBUG=-all WINEPREFIX="${PROJECT_BINARY_DIR}/wineprefix" ${YARN} ${GUI_YARN_EXTRA_OPTS} ${GUI_YARN_TARGET}
COMMAND ${CMAKE_COMMAND} -E copy_if_different
"${PROJECT_SOURCE_DIR}/gui/release/Lokinet-GUI_portable.exe"
"${PROJECT_BINARY_DIR}/gui/lokinet-gui.exe"
WORKING_DIRECTORY "${PROJECT_SOURCE_DIR}/gui")
add_custom_target(assemble_gui ALL COMMAND "true" DEPENDS "${PROJECT_BINARY_DIR}/gui/lokinet-gui.exe")
else()
message(FATAL_ERROR "Building/bundling the GUI from this repository is not supported on this platform")
endif()
else()
message(STATUS "not building gui")
endif()
if(NOT TARGET assemble_gui)
add_custom_target(assemble_gui COMMAND "true")
endif()

@ -1,46 +1,16 @@
set(CPACK_PACKAGE_VENDOR "lokinet.org")
set(CPACK_PACKAGE_HOMEPAGE_URL "https://lokinet.org/")
set(CPACK_RESOURCE_FILE_README "${PROJECT_SOURCE_DIR}/contrib/readme-installer.txt")
set(CPACK_RESOURCE_FILE_LICENSE "${PROJECT_SOURCE_DIR}/LICENSE")
set(CPACK_RESOURCE_FILE_LICENSE "${PROJECT_SOURCE_DIR}/LICENSE.txt")
if(WIN32)
include(cmake/win32_installer_deps.cmake)
install(FILES ${CMAKE_SOURCE_DIR}/contrib/configs/00-exit.ini DESTINATION share/conf.d COMPONENT exit_configs)
install(FILES ${CMAKE_SOURCE_DIR}/contrib/configs/00-keyfile.ini DESTINATION share/conf.d COMPONENT keyfile_configs)
install(FILES ${CMAKE_SOURCE_DIR}/contrib/configs/00-debug-log.ini DESTINATION share/conf.d COMPONENT debug_configs)
get_cmake_property(CPACK_COMPONENTS_ALL COMPONENTS)
list(REMOVE_ITEM CPACK_COMPONENTS_ALL "Unspecified" "lokinet" "gui" "exit_configs" "keyfile_configs" "debug_configs")
list(APPEND CPACK_COMPONENTS_ALL "lokinet" "gui" "exit_configs" "keyfile_configs" "debug_configs")
elseif(APPLE)
set(CPACK_GENERATOR DragNDrop;ZIP)
get_cmake_property(CPACK_COMPONENTS_ALL COMPONENTS)
list(REMOVE_ITEM CPACK_COMPONENTS_ALL "Unspecified")
endif()
include(CPack)
if(WIN32)
cpack_add_component(lokinet
DISPLAY_NAME "lokinet"
DESCRIPTION "core required lokinet files"
REQUIRED)
cpack_add_component(gui
DISPLAY_NAME "lokinet gui"
DESCRIPTION "electron based control panel for lokinet")
cpack_add_component(exit_configs
DISPLAY_NAME "auto-enable exit"
DESCRIPTION "automatically enable usage of exit.loki as an exit node\n"
DISABLED)
cpack_add_component(keyfile_configs
DISPLAY_NAME "persist address"
DESCRIPTION "persist .loki address across restarts of lokinet\nnot recommended when enabling exit nodes"
DISABLED)
cpack_add_component(debug_configs
DISPLAY_NAME "debug logging"
DESCRIPTION "enable debug spew log level by default"
DISABLED)
if(APPLE)
include(cmake/macos_installer_deps.cmake)
endif()
# This must always be last!
include(CPack)

@ -1,49 +1,49 @@
function(check_working_cxx_atomics64 varname)
set(OLD_CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS})
if (EMBEDDED_CFG)
set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} -m32 -march=i486")
elseif(MSVC OR MSVC_VERSION)
set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} -arch:IA32 -std:c++14")
else()
# CMAKE_CXX_STANDARD does not propagate to cmake compile tests
set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} -std=c++14")
endif()
check_cxx_source_compiles("
#include <atomic>
#include <cstdint>
std::atomic<uint64_t> x (0);
int main() {
uint64_t i = x.load(std::memory_order_relaxed);
return 0;
}
" ${varname})
set(CMAKE_REQUIRED_FLAGS ${OLD_CMAKE_REQUIRED_FLAGS})
endfunction()
function(link_libatomic)
check_working_cxx_atomics64(HAVE_CXX_ATOMICS64_WITHOUT_LIB)
if(HAVE_CXX_ATOMICS64_WITHOUT_LIB)
message(STATUS "Have working 64bit atomics")
return()
endif()
if (NOT MSVC AND NOT MSVC_VERSION)
check_library_exists(atomic __atomic_load_8 "" HAVE_CXX_LIBATOMICS64)
if (HAVE_CXX_LIBATOMICS64)
message(STATUS "Have 64bit atomics via library")
list(APPEND CMAKE_REQUIRED_LIBRARIES "atomic")
check_working_cxx_atomics64(HAVE_CXX_ATOMICS64_WITH_LIB)
if (HAVE_CXX_ATOMICS64_WITH_LIB)
message(STATUS "Can link with libatomic")
link_libraries(-latomic)
return()
endif()
endif()
endif()
if (MSVC OR MSVC_VERSION)
message(FATAL_ERROR "Host compiler must support 64-bit std::atomic! (What does MSVC do to inline atomics?)")
else()
message(FATAL_ERROR "Host compiler must support 64-bit std::atomic!")
endif()
endfunction()
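# Usage sketch: call link_libatomic() once from the top-level build on platforms where
# 64-bit std::atomic may need libatomic (e.g. some 32-bit ARM toolchains); it is a no-op
# when the compiler already provides lock-free 64-bit atomics.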

@ -0,0 +1,13 @@
# Properly links a target to a list of library names by finding the given libraries. Takes:
# - a target
# - a linktype (e.g. INTERFACE, PUBLIC, PRIVATE)
# - a library search path (or "" for defaults)
# - any number of library names
function(link_dep_libs target linktype libdirs)
foreach(lib ${ARGN})
find_library(link_lib-${lib} NAMES ${lib} PATHS ${libdirs})
if(link_lib-${lib})
target_link_libraries(${target} ${linktype} ${link_lib-${lib}})
endif()
endforeach()
endfunction()
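# Example (hypothetical names, for illustration only):
#   link_dep_libs(lokinet PUBLIC "${DEPS_DESTDIR}/lib" uv unbound sodium)
# resolves each name with find_library() and links whichever libraries are found.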

@ -1,214 +0,0 @@
if(NOT APPLE)
return()
endif()
option(MACOS_SYSTEM_EXTENSION
"Build the network extension as a system extension rather than a plugin. This must be ON for non-app store release builds, and must be OFF for dev builds and Mac App Store distribution builds"
OFF)
option(CODESIGN "codesign the resulting app and extension" ON)
set(CODESIGN_ID "" CACHE STRING "codesign the macos app using this key identity; if empty we'll try to guess")
set(default_profile_type "dev")
if(MACOS_SYSTEM_EXTENSION)
set(default_profile_type "release")
endif()
set(CODESIGN_PROFILE "${PROJECT_SOURCE_DIR}/contrib/macos/lokinet.${default_profile_type}.provisionprofile" CACHE FILEPATH
"Path to a .provisionprofile to use for the main app")
set(CODESIGN_EXT_PROFILE "${PROJECT_SOURCE_DIR}/contrib/macos/lokinet-extension.${default_profile_type}.provisionprofile" CACHE FILEPATH
"Path to a .provisionprofile to use for the lokinet extension")
if(CODESIGN AND NOT CODESIGN_ID)
if(MACOS_SYSTEM_EXTENSION)
set(codesign_cert_pattern "Developer ID Application")
else()
set(codesign_cert_pattern "Apple Development")
endif()
execute_process(
COMMAND security find-identity -v -p codesigning
COMMAND sed -n "s/^ *[0-9][0-9]*) *\\([A-F0-9]\\{40\\}\\) *\"\\(${codesign_cert_pattern}.*\\)\"\$/\\1 \\2/p"
RESULT_VARIABLE find_id_exit_code
OUTPUT_VARIABLE find_id_output)
if(NOT find_id_exit_code EQUAL 0)
message(FATAL_ERROR "Finding signing identities with security find-identity failed; try specifying an id using -DCODESIGN_ID=...")
endif()
string(REGEX MATCHALL "(^|\n)[0-9A-F]+" find_id_sign_id "${find_id_output}")
if(NOT find_id_sign_id)
message(FATAL_ERROR "Did not find any \"${codesign_cert_pattern}\" identity; try specifying an id using -DCODESIGN_ID=...")
endif()
if (find_id_sign_id MATCHES ";")
message(FATAL_ERROR "Found multiple \"${codesign_cert_pattern}\" identities:\n${find_id_output}\nSpecify an identify using -DCODESIGN_ID=...")
endif()
set(CODESIGN_ID "${find_id_sign_id}" CACHE STRING "" FORCE)
endif()
if(CODESIGN)
message(STATUS "Codesigning using ${CODESIGN_ID}")
if (NOT MACOS_NOTARIZE_USER AND NOT MACOS_NOTARIZE_PASS AND NOT MACOS_NOTARIZE_ASC AND EXISTS "$ENV{HOME}/.notarization.cmake")
message(STATUS "Loading notarization info from ~/.notarization.cmake")
include("$ENV{HOME}/.notarization.cmake")
endif()
if (MACOS_NOTARIZE_USER AND MACOS_NOTARIZE_PASS AND MACOS_NOTARIZE_ASC)
message(STATUS "Enabling notarization with account ${MACOS_NOTARIZE_ASC}/${MACOS_NOTARIZE_USER}")
else()
message(WARNING "You have not set one or more of MACOS_NOTARIZE_USER, MACOS_NOTARIZE_PASS, MACOS_NOTARIZE_ASC: notarization will fail; see contrib/macos/README.txt")
endif()
else()
message(WARNING "Codesigning disabled; the resulting build will not run on most macOS systems")
endif()
foreach(prof IN ITEMS CODESIGN_PROFILE CODESIGN_EXT_PROFILE)
if(NOT ${prof})
message(WARNING "Missing a ${prof} provisioning profile: Apple will most likely log an uninformative error message to the system log and then kill harmless kittens if you try to run the result")
elseif(NOT EXISTS "${${prof}}")
message(FATAL_ERROR "Provisioning profile ${${prof}} does not exist; fix your -D${prof} path")
endif()
endforeach()
message(STATUS "Using ${CODESIGN_PROFILE} app provisioning profile")
message(STATUS "Using ${CODESIGN_EXT_PROFILE} extension provisioning profile")
set(lokinet_installer "${PROJECT_BINARY_DIR}/Lokinet ${PROJECT_VERSION}")
if(NOT CODESIGN)
set(lokinet_installer "${lokinet_installer}-UNSIGNED")
endif()
set(lokinet_app "${lokinet_installer}/Lokinet.app")
if(MACOS_SYSTEM_EXTENSION)
set(lokinet_ext_dir Contents/Library/SystemExtensions)
else()
set(lokinet_ext_dir Contents/PlugIns)
endif()
if(CODESIGN)
if(MACOS_SYSTEM_EXTENSION)
set(LOKINET_ENTITLEMENTS_TYPE sysext)
set(notarize_py_is_sysext True)
else()
set(LOKINET_ENTITLEMENTS_TYPE plugin)
set(notarize_py_is_sysext False)
endif()
configure_file(
"${PROJECT_SOURCE_DIR}/contrib/macos/sign.sh.in"
"${PROJECT_BINARY_DIR}/sign.sh"
@ONLY)
add_custom_target(
sign
DEPENDS "${PROJECT_BINARY_DIR}/sign.sh"
COMMAND "${PROJECT_BINARY_DIR}/sign.sh"
)
if(MACOS_NOTARIZE_USER AND MACOS_NOTARIZE_PASS AND MACOS_NOTARIZE_ASC)
configure_file(
"${PROJECT_SOURCE_DIR}/contrib/macos/notarize.py.in"
"${PROJECT_BINARY_DIR}/notarize.py"
@ONLY)
add_custom_target(
notarize
DEPENDS "${PROJECT_BINARY_DIR}/notarize.py" sign
COMMAND "${PROJECT_BINARY_DIR}/notarize.py"
)
else()
message(WARNING "You have not set one or more of MACOS_NOTARIZE_USER, MACOS_NOTARIZE_PASS, MACOS_NOTARIZE_ASC: notarization disabled")
endif()
else()
add_custom_target(sign COMMAND "true")
add_custom_target(notarize DEPENDS sign COMMAND "true")
endif()
set(mac_icon "${PROJECT_BINARY_DIR}/lokinet.icns")
add_custom_command(OUTPUT "${mac_icon}"
COMMAND ${PROJECT_SOURCE_DIR}/contrib/macos/mk-icns.sh ${PROJECT_SOURCE_DIR}/contrib/lokinet-mac.svg "${mac_icon}"
DEPENDS ${PROJECT_SOURCE_DIR}/contrib/lokinet-mac.svg ${PROJECT_SOURCE_DIR}/contrib/macos/mk-icns.sh)
add_custom_target(icon DEPENDS "${mac_icon}")
if(BUILD_PACKAGE)
add_executable(seticon "${PROJECT_SOURCE_DIR}/contrib/macos/seticon.swift")
add_custom_command(OUTPUT "${lokinet_installer}.dmg"
DEPENDS notarize seticon
COMMAND create-dmg
--volname "Lokinet ${PROJECT_VERSION}"
--volicon lokinet.icns
--background "${PROJECT_SOURCE_DIR}/contrib/macos/installer.tiff"
--text-size 16
--icon-size 128
--window-size 555 440
--icon Lokinet.app 151 196
--hide-extension Lokinet.app
--app-drop-link 403 196
--eula "${PROJECT_SOURCE_DIR}/LICENSE"
--no-internet-enable
"${lokinet_installer}.dmg"
"${lokinet_installer}"
COMMAND ./seticon lokinet.icns "${lokinet_installer}.dmg"
)
add_custom_target(dmg DEPENDS "${lokinet_installer}.dmg")
endif()
# Called later to set things up, after the main lokinet targets are set up
function(macos_target_setup)
if(MACOS_SYSTEM_EXTENSION)
target_compile_definitions(lokinet PRIVATE MACOS_SYSTEM_EXTENSION)
endif()
set_target_properties(lokinet
PROPERTIES
OUTPUT_NAME Lokinet
MACOSX_BUNDLE TRUE
MACOSX_BUNDLE_INFO_STRING "Lokinet IP Packet Onion Router"
MACOSX_BUNDLE_BUNDLE_NAME "Lokinet"
MACOSX_BUNDLE_BUNDLE_VERSION "${lokinet_VERSION}"
MACOSX_BUNDLE_LONG_VERSION_STRING "${lokinet_VERSION}"
MACOSX_BUNDLE_SHORT_VERSION_STRING "${lokinet_VERSION_MAJOR}.${lokinet_VERSION_MINOR}"
MACOSX_BUNDLE_GUI_IDENTIFIER "org.lokinet"
MACOSX_BUNDLE_INFO_PLIST "${PROJECT_SOURCE_DIR}/contrib/macos/lokinet.Info.plist.in"
MACOSX_BUNDLE_COPYRIGHT "© 2022, The Oxen Project"
)
add_custom_target(copy_bootstrap
DEPENDS lokinet-extension
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${PROJECT_SOURCE_DIR}/contrib/bootstrap/mainnet.signed
$<TARGET_BUNDLE_DIR:lokinet-extension>/Contents/Resources/bootstrap.signed
)
add_dependencies(lokinet lokinet-extension icon)
if(CODESIGN_PROFILE)
add_custom_target(copy_prov_prof
DEPENDS lokinet
COMMAND ${CMAKE_COMMAND} -E copy_if_different ${CODESIGN_PROFILE}
$<TARGET_BUNDLE_DIR:lokinet>/Contents/embedded.provisionprofile
)
else()
add_custom_target(copy_prov_prof COMMAND true)
endif()
add_custom_target(assemble ALL
DEPENDS lokinet lokinet-extension icon copy_prov_prof copy_bootstrap
COMMAND rm -rf "${lokinet_app}"
COMMAND mkdir -p "${lokinet_installer}"
COMMAND cp -a $<TARGET_BUNDLE_DIR:lokinet> "${lokinet_app}"
COMMAND mkdir -p "${lokinet_app}/${lokinet_ext_dir}"
COMMAND cp -a $<TARGET_BUNDLE_DIR:lokinet-extension> "${lokinet_app}/${lokinet_ext_dir}/"
COMMAND mkdir -p "${lokinet_app}/Contents/Resources"
COMMAND cp -a "${mac_icon}" "${lokinet_app}/Contents/Resources/icon.icns"
)
if(BUILD_GUI)
add_dependencies(sign assemble_gui)
else()
add_dependencies(sign assemble)
endif()
endfunction()

@ -0,0 +1,113 @@
# macos specific cpack stuff goes here
# Here we build lokinet-network-control-panel into 'lokinet-gui.app' in "extra/" where a postinstall
# script will then move it to /Applications/.
set(LOKINET_GUI_REPO "https://github.com/oxen-io/loki-network-control-panel.git"
CACHE STRING "Can be set to override the default lokinet-gui git repository")
set(LOKINET_GUI_CHECKOUT "origin/master"
CACHE STRING "Can be set to specify a particular branch or tag to build from LOKINET_GUI_REPO")
set(MACOS_SIGN_APP "" # FIXME: it doesn't use a Apple Distribution key because WTF knows.
CACHE STRING "enable codesigning of the stuff inside the .app and the lokinet binary -- use a 'Apple Distribution' key (or description) from `security find-identity -v`")
set(MACOS_SIGN_PKG ""
CACHE STRING "enable codesigning of the .pkg -- use a 'Developer ID Installer' key (or description) from `security find-identity -v`")
set(MACOS_NOTARIZE_USER ""
CACHE STRING "set macos notarization username; can also set it in ~/.notarization.cmake")
set(MACOS_NOTARIZE_PASS ""
CACHE STRING "set macos notarization password; can also set it in ~/.notarization.cmake")
set(MACOS_NOTARIZE_ASC ""
CACHE STRING "set macos notarization asc provider; can also set it in ~/.notarization.cmake")
include(ExternalProject)
message(STATUS "Building UninstallLokinet.app")
ExternalProject_Add(lokinet-uninstaller
SOURCE_DIR ${CMAKE_SOURCE_DIR}/contrib/macos/uninstaller
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${PROJECT_BINARY_DIR} -DMACOS_SIGN=${MACOS_SIGN_APP}
-DCMAKE_PREFIX_PATH=${CMAKE_PREFIX_PATH}
)
message(STATUS "Building LokinetGUI.app from ${LOKINET_GUI_REPO} @ ${LOKINET_GUI_CHECKOUT}")
if(NOT BUILD_STATIC_DEPS)
message(FATAL_ERROR "Building an installer on macos requires -DBUILD_STATIC_DEPS=ON")
endif()
ExternalProject_Add(lokinet-gui
DEPENDS oxenmq::oxenmq
GIT_REPOSITORY "${LOKINET_GUI_REPO}"
GIT_TAG "${LOKINET_GUI_CHECKOUT}"
CMAKE_ARGS -DMACOS_APP=ON -DCMAKE_INSTALL_PREFIX=${PROJECT_BINARY_DIR} -DMACOS_SIGN=${MACOS_SIGN_APP}
-DCMAKE_PREFIX_PATH=${CMAKE_PREFIX_PATH} -DBUILD_SHARED_LIBS=OFF
"-DOXENMQ_LIBRARIES=$<TARGET_FILE:oxenmq::oxenmq>$<SEMICOLON>$<TARGET_FILE:libzmq>$<SEMICOLON>$<TARGET_FILE:sodium>"
"-DOXENMQ_INCLUDE_DIRS=$<TARGET_PROPERTY:oxenmq::oxenmq,INCLUDE_DIRECTORIES>"
)
install(PROGRAMS ${CMAKE_SOURCE_DIR}/contrib/macos/lokinet_uninstall.sh
DESTINATION "bin/"
COMPONENT lokinet)
install(DIRECTORY ${PROJECT_BINARY_DIR}/LokinetGUI.app
DESTINATION "../../Applications/Lokinet"
USE_SOURCE_PERMISSIONS
COMPONENT gui
PATTERN "*"
)
install(DIRECTORY ${PROJECT_BINARY_DIR}/UninstallLokinet.app
DESTINATION "../../Applications/Lokinet"
USE_SOURCE_PERMISSIONS
COMPONENT gui
PATTERN "*"
)
# copy files that will be later moved by the postinstall script to proper locations
install(FILES ${CMAKE_SOURCE_DIR}/contrib/macos/lokinet_macos_daemon_script.sh
${CMAKE_SOURCE_DIR}/contrib/macos/network.loki.lokinet.daemon.plist
${CMAKE_SOURCE_DIR}/contrib/macos/lokinet-newsyslog.conf
DESTINATION "extra/"
COMPONENT lokinet)
set(CPACK_COMPONENTS_ALL lokinet gui)
set(CPACK_COMPONENT_LOKINET_DISPLAY_NAME "Lokinet Service")
set(CPACK_COMPONENT_LOKINET_DESCRIPTION "Main Lokinet runtime service, managed by Launchd")
set(CPACK_COMPONENT_GUI_DISPLAY_NAME "Lokinet GUI")
set(CPACK_COMPONENT_GUI_DESCRIPTION "Small GUI which provides stats and limited runtime control of the Lokinet service. Resides in the system tray.")
set(CPACK_GENERATOR "productbuild")
set(CPACK_PACKAGING_INSTALL_PREFIX "/opt/lokinet")
set(CPACK_PREINSTALL_LOKINET_SCRIPT ${CMAKE_SOURCE_DIR}/contrib/macos/preinstall)
set(CPACK_POSTFLIGHT_LOKINET_SCRIPT ${CMAKE_SOURCE_DIR}/contrib/macos/postinstall)
set(CPACK_RESOURCE_FILE_LICENSE "${PROJECT_SOURCE_DIR}/LICENSE.txt")
set(CPACK_PRODUCTBUILD_IDENTITY_NAME "${MACOS_SIGN_PKG}")
if(MACOS_SIGN_APP)
add_custom_target(sign ALL
echo "Signing lokinet and lokinet-vpn binaries"
COMMAND codesign -s "${MACOS_SIGN_APP}" --strict --options runtime --force -vvv $<TARGET_FILE:lokinet> $<TARGET_FILE:lokinet-vpn>
DEPENDS lokinet lokinet-vpn
)
endif()
if(MACOS_SIGN_APP AND MACOS_SIGN_PKG)
if(NOT MACOS_NOTARIZE_USER)
if(EXISTS "$ENV{HOME}/.notarization.cmake")
include("$ENV{HOME}/.notarization.cmake")
endif()
endif()
if(MACOS_NOTARIZE_USER AND MACOS_NOTARIZE_PASS AND MACOS_NOTARIZE_ASC)
message(STATUS "'notarization' target enabled")
configure_file(${CMAKE_SOURCE_DIR}/contrib/macos/notarize.py.in ${CMAKE_CURRENT_BINARY_DIR}/contrib/notarize.py ESCAPE_QUOTES @ONLY)
file(COPY ${CMAKE_CURRENT_BINARY_DIR}/contrib/notarize.py DESTINATION ${PROJECT_BINARY_DIR} FILE_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE)
add_custom_target(notarize ./notarize.py)
else()
message(WARNING "Not enable 'notarization' target: signing is enabled but notarization info not provided. Create ~/.notarization.cmake or set cmake parameters directly")
endif()
endif()

@ -1,53 +0,0 @@
# ngtcp2's top-level CMakeLists.txt loads a bunch of crap we don't want (examples, a conflicting
# 'check' target, etc.); instead we directly include its lib subdirectory to build just the
# library, but we have to set up a couple things to make that work:
function(add_ngtcp2_lib)
file(STRINGS ngtcp2/CMakeLists.txt ngtcp2_project_line REGEX "^project\\(ngtcp2 ")
if(NOT ngtcp2_project_line MATCHES "^project\\(ngtcp2 VERSION ([0-9]+)\\.([0-9]+)\\.([0-9]+)\\)$")
message(FATAL_ERROR "Unable to extract ngtcp2 version from ngtcp2/CMakeLists.txt (found '${ngtcp2_project_line}')")
endif()
set(PACKAGE_VERSION "${CMAKE_MATCH_1}.${CMAKE_MATCH_2}.${CMAKE_MATCH_3}")
include(ngtcp2/cmake/Version.cmake)
HexVersion(PACKAGE_VERSION_NUM ${CMAKE_MATCH_1} ${CMAKE_MATCH_2} ${CMAKE_MATCH_3})
configure_file("ngtcp2/lib/includes/ngtcp2/version.h.in" "ngtcp2/lib/includes/ngtcp2/version.h" @ONLY)
set(BUILD_SHARED_LIBS OFF)
# Checks for header files.
include(CheckIncludeFile)
check_include_file("arpa/inet.h" HAVE_ARPA_INET_H)
check_include_file("netinet/in.h" HAVE_NETINET_IN_H)
check_include_file("stddef.h" HAVE_STDDEF_H)
check_include_file("stdint.h" HAVE_STDINT_H)
check_include_file("stdlib.h" HAVE_STDLIB_H)
check_include_file("string.h" HAVE_STRING_H)
check_include_file("unistd.h" HAVE_UNISTD_H)
check_include_file("sys/endian.h" HAVE_SYS_ENDIAN_H)
check_include_file("endian.h" HAVE_ENDIAN_H)
check_include_file("byteswap.h" HAVE_BYTESWAP_H)
include(CheckTypeSize)
check_type_size("ssize_t" SIZEOF_SSIZE_T)
if(SIZEOF_SSIZE_T STREQUAL "")
set(ssize_t ptrdiff_t)
endif()
include(CheckSymbolExists)
if(HAVE_ENDIAN_H)
check_symbol_exists(be64toh "endian.h" HAVE_BE64TOH)
endif()
if(NOT HAVE_BE64TOH AND HAVE_SYS_ENDIAN_H)
check_symbol_exists(be64toh "sys/endian.h" HAVE_BE64TOH)
endif()
check_symbol_exists(bswap_64 "byteswap.h" HAVE_BSWAP_64)
configure_file(ngtcp2/cmakeconfig.h.in ngtcp2/config.h)
include_directories("${CMAKE_CURRENT_BINARY_DIR}/ngtcp2") # for config.h
set(ENABLE_STATIC_LIB ON FORCE BOOL)
set(ENABLE_SHARED_LIB OFF FORCE BOOL)
add_subdirectory(ngtcp2/lib EXCLUDE_FROM_ALL)
target_compile_definitions(ngtcp2_static PRIVATE -DHAVE_CONFIG_H -D_GNU_SOURCE)
endfunction()
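# Assumed usage: the parent CMakeLists that contains the ngtcp2 submodule checkout calls
# add_ngtcp2_lib() and then links against the resulting ngtcp2_static target.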

@ -0,0 +1,18 @@
set(WITH_STATIC OFF)
set(WITH_SHARED ON)
if("${SHADOW_ROOT}" STREQUAL "")
set(SHADOW_ROOT "$ENV{HOME}/.shadow")
endif("${SHADOW_ROOT}" STREQUAL "")
if(EXISTS "${SHADOW_ROOT}")
message(STATUS "SHADOW_ROOT = ${SHADOW_ROOT}")
else()
message(FATAL_ERROR "SHADOW_ROOT path does not exist: '${SHADOW_ROOT}'")
endif(EXISTS "${SHADOW_ROOT}")
set(CMAKE_MODULE_PATH "${SHADOW_ROOT}/share/cmake/Modules")
include_directories(${CMAKE_MODULE_PATH})
include(ShadowTools)
add_compile_options(-fno-inline -fno-strict-aliasing )
add_definitions(-DTESTNET=1)
add_definitions(-DLOKINET_SHADOW)
include_directories(${SHADOW_ROOT}/include)

@ -7,6 +7,15 @@ endif()
include(CheckCXXSourceCompiles)
include(CheckLibraryExists)
if(WITH_JEMALLOC)
find_package(Jemalloc REQUIRED)
if(NOT JEMALLOC_FOUND)
message(FATAL_ERROR "did not find jemalloc")
endif()
add_definitions(-DUSE_JEMALLOC)
message(STATUS "using jemalloc")
endif()
add_definitions(-DUNIX)
add_definitions(-DPOSIX)

@ -1,49 +1,31 @@
if(NOT WIN32)
return()
endif()
if (NOT STATIC_LINK)
message(FATAL_ERROR "windows requires static builds (thanks balmer)")
endif()
enable_language(RC)
option(WITH_WINDOWS_32 "build 32 bit windows" OFF)
# unlike unix where you get a *single* compiler ID string in .comment
# GNU ld sees fit to merge *all* the .ident sections in object files
# to .r[o]data section one after the other!
add_compile_options(-fno-ident -Wa,-mbig-obj)
set(gtest_force_shared_crt ON CACHE BOOL "" FORCE)
if(NOT MSVC_VERSION)
add_compile_options($<$<COMPILE_LANGUAGE:C>:-Wno-bad-function-cast>)
add_compile_options($<$<COMPILE_LANGUAGE:C>:-Wno-cast-function-type>)
add_compile_options($<$<COMPILE_LANGUAGE:CXX>:-fpermissive>)
# unlike unix where you get a *single* compiler ID string in .comment
# GNU ld sees fit to merge *all* the .ident sections in object files
# to .r[o]data section one after the other!
add_compile_options(-fno-ident -Wa,-mbig-obj)
link_libraries( -lws2_32 -lshlwapi -ldbghelp -luser32 -liphlpapi -lpsapi -luserenv)
# zmq requires windows xp or higher
add_definitions(-DWINVER=0x0501 -D_WIN32_WINNT=0x0501)
endif()
if(EMBEDDED_CFG)
link_libatomic()
endif()
set(WINTUN_VERSION 0.14.1 CACHE STRING "wintun version")
set(WINTUN_MIRROR https://www.wintun.net/builds
CACHE STRING "wintun mirror(s)")
set(WINTUN_SOURCE wintun-${WINTUN_VERSION}.zip)
set(WINTUN_HASH SHA256=07c256185d6ee3652e09fa55c0b673e2624b565e02c4b9091c79ca7d2f24ef51
CACHE STRING "wintun source hash")
set(WINDIVERT_VERSION 2.2.0-A CACHE STRING "windivert version")
set(WINDIVERT_MIRROR https://reqrypt.org/download
CACHE STRING "windivert mirror(s)")
set(WINDIVERT_SOURCE WinDivert-${WINDIVERT_VERSION}.zip)
set(WINDIVERT_HASH SHA256=2a7630aac0914746fbc565ac862fa096e3e54233883ac52d17c83107496b7a7f
CACHE STRING "windivert source hash")
add_definitions(-DWIN32_LEAN_AND_MEAN -DWIN32)
set(WINTUN_URL ${WINTUN_MIRROR}/${WINTUN_SOURCE}
CACHE STRING "wintun download url")
set(WINDIVERT_URL ${WINDIVERT_MIRROR}/${WINDIVERT_SOURCE}
CACHE STRING "windivert download url")
message(STATUS "Downloading wintun from ${WINTUN_URL}")
file(DOWNLOAD ${WINTUN_URL} ${CMAKE_BINARY_DIR}/wintun.zip EXPECTED_HASH ${WINTUN_HASH})
message(STATUS "Downloading windivert from ${WINDIVERT_URL}")
file(DOWNLOAD ${WINDIVERT_URL} ${CMAKE_BINARY_DIR}/windivert.zip EXPECTED_HASH ${WINDIVERT_HASH})
execute_process(COMMAND ${CMAKE_COMMAND} -E tar x ${CMAKE_BINARY_DIR}/wintun.zip
WORKING_DIRECTORY ${CMAKE_BINARY_DIR})
execute_process(COMMAND ${CMAKE_COMMAND} -E tar x ${CMAKE_BINARY_DIR}/windivert.zip
WORKING_DIRECTORY ${CMAKE_BINARY_DIR})
if (NOT STATIC_LINK AND NOT MSVC)
message("must ship compiler runtime libraries with this build: libwinpthread-1.dll, libgcc_s_dw2-1.dll, and libstdc++-6.dll")
message("for release builds, turn on STATIC_LINK in cmake options")
endif()

@ -1,46 +1,45 @@
install(DIRECTORY ${CMAKE_BINARY_DIR}/gui DESTINATION share COMPONENT gui)
if(WITH_WINDOWS_32)
install(FILES ${CMAKE_BINARY_DIR}/wintun/bin/x86/wintun.dll DESTINATION bin COMPONENT lokinet)
install(FILES ${CMAKE_BINARY_DIR}/WinDivert-${WINDIVERT_VERSION}/x86/WinDivert.sys DESTINATION lib COMPONENT lokinet)
install(FILES ${CMAKE_BINARY_DIR}/WinDivert-${WINDIVERT_VERSION}/x86/WinDivert.dll DESTINATION bin COMPONENT lokinet)
else()
install(FILES ${CMAKE_BINARY_DIR}/wintun/bin/amd64/wintun.dll DESTINATION bin COMPONENT lokinet)
install(FILES ${CMAKE_BINARY_DIR}/WinDivert-${WINDIVERT_VERSION}/x64/WinDivert64.sys DESTINATION lib COMPONENT lokinet)
install(FILES ${CMAKE_BINARY_DIR}/WinDivert-${WINDIVERT_VERSION}/x64/WinDivert.dll DESTINATION bin COMPONENT lokinet)
if(NOT GUI_ZIP_URL)
set(GUI_ZIP_URL "https://oxen.rocks/oxen-io/loki-network-control-panel/lokinet-gui-windows-32bit-v0.3.8.zip")
set(GUI_ZIP_HASH_OPTS EXPECTED_HASH SHA256=60c2b738cf997e5684f307e5222498fd09143d495a932924105a49bf59ded8bb)
endif()
set(BOOTSTRAP_FILE "${PROJECT_SOURCE_DIR}/contrib/bootstrap/mainnet.signed")
install(FILES ${BOOTSTRAP_FILE} DESTINATION share COMPONENT lokinet RENAME bootstrap.signed)
set(TUNTAP_URL "https://build.openvpn.net/downloads/releases/latest/tap-windows-latest-stable.exe")
set(TUNTAP_EXE "${CMAKE_BINARY_DIR}/tuntap-install.exe")
set(BOOTSTRAP_URL "https://seed.lokinet.org/lokinet.signed")
set(BOOTSTRAP_FILE "${CMAKE_BINARY_DIR}/bootstrap.signed")
file(DOWNLOAD
${TUNTAP_URL}
${TUNTAP_EXE})
file(DOWNLOAD
${BOOTSTRAP_URL}
${BOOTSTRAP_FILE})
set(win_ico "${PROJECT_BINARY_DIR}/lokinet.ico")
add_custom_command(OUTPUT "${win_ico}"
COMMAND ${PROJECT_SOURCE_DIR}/contrib/make-ico.sh ${PROJECT_SOURCE_DIR}/contrib/lokinet.svg "${win_ico}"
DEPENDS ${PROJECT_SOURCE_DIR}/contrib/lokinet.svg ${PROJECT_SOURCE_DIR}/contrib/make-ico.sh)
add_custom_target(icon ALL DEPENDS "${win_ico}")
file(DOWNLOAD
${GUI_ZIP_URL}
${CMAKE_BINARY_DIR}/lokinet-gui.zip
${GUI_ZIP_HASH_OPTS})
execute_process(COMMAND ${CMAKE_COMMAND} -E tar xf ${CMAKE_BINARY_DIR}/lokinet-gui.zip
WORKING_DIRECTORY ${CMAKE_BINARY_DIR})
install(DIRECTORY ${CMAKE_BINARY_DIR}/gui DESTINATION share COMPONENT gui)
install(PROGRAMS ${TUNTAP_EXE} DESTINATION bin COMPONENT tuntap)
install(FILES ${BOOTSTRAP_FILE} DESTINATION share COMPONENT lokinet)
set(CPACK_PACKAGE_INSTALL_DIRECTORY "Lokinet")
set(CPACK_NSIS_MUI_ICON "${PROJECT_BINARY_DIR}/lokinet.ico")
set(CPACK_NSIS_MUI_ICON "${CMAKE_SOURCE_DIR}/win32-setup/lokinet.ico")
set(CPACK_NSIS_DEFINES "RequestExecutionLevel admin")
set(CPACK_NSIS_ENABLE_UNINSTALL_BEFORE_INSTALL ON)
function(read_nsis_file filename outvar)
file(STRINGS "${filename}" _outvar)
list(TRANSFORM _outvar REPLACE "\\\\" "\\\\\\\\")
list(JOIN _outvar "\\n" out)
set(${outvar} ${out} PARENT_SCOPE)
endfunction()
read_nsis_file("${CMAKE_SOURCE_DIR}/win32-setup/extra_preinstall.nsis" _extra_preinstall)
read_nsis_file("${CMAKE_SOURCE_DIR}/win32-setup/extra_install.nsis" _extra_install)
read_nsis_file("${CMAKE_SOURCE_DIR}/win32-setup/extra_uninstall.nsis" _extra_uninstall)
read_nsis_file("${CMAKE_SOURCE_DIR}/win32-setup/extra_create_icons.nsis" _extra_create_icons)
read_nsis_file("${CMAKE_SOURCE_DIR}/win32-setup/extra_delete_icons.nsis" _extra_delete_icons)
set(CPACK_NSIS_EXTRA_PREINSTALL_COMMANDS "${_extra_preinstall}")
set(CPACK_NSIS_EXTRA_INSTALL_COMMANDS "${_extra_install}")
set(CPACK_NSIS_EXTRA_UNINSTALL_COMMANDS "${_extra_uninstall}")
set(CPACK_NSIS_CREATE_ICONS_EXTRA "${_extra_create_icons}")
set(CPACK_NSIS_DELETE_ICONS_EXTRA "${_extra_delete_icons}")
set(CPACK_NSIS_COMPRESSOR "/SOLID lzma")
set(CPACK_NSIS_EXTRA_INSTALL_COMMANDS "ifFileExists $INSTDIR\\\\bin\\\\tuntap-install.exe 0 +2\\nExecWait '$INSTDIR\\\\bin\\\\tuntap-install.exe /S'\\nExecWait '$INSTDIR\\\\bin\\\\lokinet.exe --install'\\nExecWait 'sc failure lokinet reset= 60 actions= restart/1000'\\nExecWait '$INSTDIR\\\\bin\\\\lokinet.exe -g C:\\\\ProgramData\\\\lokinet\\\\lokinet.ini'\\nCopyFiles '$INSTDIR\\\\share\\\\bootstrap.signed' C:\\\\ProgramData\\\\lokinet\\\\bootstrap.signed\\n")
set(CPACK_NSIS_EXTRA_UNINSTALL_COMMANDS "ExecWait 'net stop lokinet'\\nExecWait 'taskkill /f /t /im lokinet-gui.exe'\\nExecWait '$INSTDIR\\\\bin\\\\lokinet.exe --remove'\\nRMDir /r /REBOOTOK C:\\\\ProgramData\\\\lokinet")
set(CPACK_NSIS_CREATE_ICONS_EXTRA
"CreateShortCut '$SMPROGRAMS\\\\$STARTMENU_FOLDER\\\\Lokinet.lnk' '$INSTDIR\\\\share\\\\gui\\\\lokinet-gui.exe'"
)
set(CPACK_NSIS_DELETE_ICONS_EXTRA
"Delete '$SMPROGRAMS\\\\$START_MENU\\\\Lokinet.lnk'"
)
get_cmake_property(CPACK_COMPONENTS_ALL COMPONENTS)
list(REMOVE_ITEM CPACK_COMPONENTS_ALL "Unspecified")

@ -1,67 +0,0 @@
#!/bin/bash
set -e
default_abis="armeabi-v7a arm64-v8a x86_64"
build_abis=${ABIS:-$default_abis}
test x$NDK = x && test -e /usr/lib/android-ndk && export NDK=/usr/lib/android-ndk
test x$NDK = x && exit 1
echo "building abis: $build_abis"
root=$(readlink -f "$1")
shift
build=$(readlink -f "$1")
shift
mkdir -p $build
cd $build
for abi in $build_abis; do
mkdir -p build-$abi
cd build-$abi
cmake \
-S "$root" -B . \
-G 'Unix Makefiles' \
-DANDROID=ON \
-DANDROID_ABI=$abi \
-DANDROID_ARM_MODE=arm \
-DANDROID_PLATFORM=android-23 \
-DANDROID_API=23 \
-DANDROID_STL=c++_static \
-DCMAKE_TOOLCHAIN_FILE=$NDK/build/cmake/android.toolchain.cmake \
-DBUILD_STATIC_DEPS=ON \
-DBUILD_PACKAGE=ON \
-DBUILD_SHARED_LIBS=OFF \
-DBUILD_TESTING=OFF \
-DWITH_TESTS=OFF \
-DWITH_BOOTSTRAP=OFF \
-DNATIVE_BUILD=OFF \
-DSTATIC_LINK=ON \
-DWITH_SYSTEMD=OFF \
-DFORCE_OXENMQ_SUBMODULE=ON \
-DFORCE_OXENC_SUBMODULE=ON \
-DFORCE_FMT_SUBMODULE=ON \
-DFORCE_SPDLOG_SUBMODULE=ON \
-DFORCE_NLOHMANN_SUBMODULE=ON \
-DSUBMODULE_CHECK=OFF \
-DWITH_LTO=OFF \
-DCMAKE_BUILD_TYPE=Release \
"$@"
cd -
done
rm -f $build/Makefile
echo "# generated makefile" >> $build/Makefile
echo "all: $build_abis" >> $build/Makefile
for abi in $build_abis; do
echo -ne "$abi:\n\t" >> $build/Makefile
echo -ne '$(MAKE) -C ' >> $build/Makefile
echo "build-$abi lokinet-android" >> $build/Makefile
echo -ne "\tmkdir -p out/$abi && cp build-$abi/jni/liblokinet-android.so out/$abi/liblokinet-android.so\n\n" >> $build/Makefile
echo -ne "clean-$abi:\n\t" >> $build/Makefile
echo -ne '$(MAKE) -C ' >> $build/Makefile
echo "build-$abi clean" >> $build/Makefile
done
echo -ne "clean:" >> $build/Makefile
for targ in $build_abis ; do echo -ne " clean-$targ" >> $build/Makefile ; done
echo "" >> $build/Makefile

@ -1,8 +0,0 @@
#!/bin/bash
set -e
set +x
root="$(readlink -f $(dirname $0)/../)"
cd "$root"
./contrib/android-configure.sh . build-android "$@"
make -C build-android -j ${JOBS:-$(nproc)}
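For example, to build a single ABI with a fixed job count (a sketch; the wrapper path contrib/android-build.sh is an assumption, inferred from the android-configure.sh path it invokes):
$ ABIS="arm64-v8a" JOBS=4 ./contrib/android-build.sh
$ ls build-android/out/arm64-v8a/liblokinet-android.so   # per the generated makefile above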

@ -1,109 +0,0 @@
#!/usr/bin/python3
import sys
import pprint
if len(sys.argv) == 1 or (len(sys.argv) == 2 and sys.argv[1] == '-'):
f = sys.stdin.buffer
elif len(sys.argv) != 2 or sys.argv[1].startswith('-'):
print("Usage: {} FILE -- dumps a bencoded file".format(sys.argv[0]), file=sys.stderr)
sys.exit(1)
else:
f = open(sys.argv[1], 'rb')
initial = f.peek(2)
is_hex = False
if initial.startswith(b'64') or initial.startswith(b'6c'):
print("Input looks like hex bencoded data; parsing as hex input", file=sys.stderr)
is_hex = True
class HexPrinter():
def __init__(self, data):
self.data = data
def __repr__(self):
return "hex({} bytes):'{}'".format(len(self.data), self.data.hex())
def next_byte():
if is_hex:
pair = f.read(2)
assert pair is not None and len(pair) == 2
b = int(pair, 16).to_bytes(1, 'big')
else:
b = f.read(1)
assert b is not None and len(b) == 1
return b
def parse_int():
s = b''
x = next_byte()
while x in b"0123456789-":
s += x
x = next_byte()
assert x == b'e' and len(s) > 0, "Invalid integer encoding"
return int(s)
def parse_string(s):
x = next_byte()
while x in b"0123456789":
s += x
x = next_byte()
assert x == b':', "Invalid string encoding"
s = int(s)
if is_hex:
data = bytes.fromhex(f.read(2*s).decode('ascii'))
else:
data = f.read(s)
assert len(data) == s, "Truncated string data"
# If the string is ascii then convert to string:
if all(0x20 <= b <= 0x7e for b in data):
return data.decode()
# Otherwise display as hex:
return HexPrinter(data)
def parse_dict():
d = {}
last_key = None
while True:
t = next_byte()
if t == b'e':
return d
assert t in b"0123456789", "Invalid dict: dict keys must be strings"
key = parse_string(t)
raw_key = key.data if isinstance(key, HexPrinter) else key.encode()
if last_key is not None and raw_key <= last_key:
print("Warning: found out-of-order dict keys ({} after {})".format(raw_key, last_key), file=sys.stderr)
last_key = raw_key
t = next_byte()
d[key] = parse_thing(t)
def parse_list():
l = []
while True:
t = next_byte()
if t == b'e':
return l
l.append(parse_thing(t))
def parse_thing(t):
if t == b'd':
return parse_dict()
if t == b'l':
return parse_list()
if t == b'i':
return parse_int()
if t in b"0123456789":
return parse_string(t)
assert False, "Parsing error: encountered invalid type '{}'".format(t)
pprint.PrettyPrinter(
indent=2
).pprint(parse_thing(next_byte()))
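A quick smoke test for the dumper above (a sketch; assumes it is saved as bencode-dump.py and made executable):
$ printf 'd1:ad1:bi42ee1:c4:spame' | ./bencode-dump.py
$ ./bencode-dump.py mainnet.signed    # or point it at a bootstrap RC file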

@ -0,0 +1 @@
lokinet-bootserv

@ -0,0 +1,29 @@
# replace your.server.tld with your server's fqdn
server {
listen 80;
server_name your.server.tld;
location / {
return 302 https://your.server.tld$request_uri;
}
location /.well-known/acme-challenge {
root /var/www/letsencrypt;
}
}
server {
listen 443 ssl;
server_name your.server.tld;
ssl_certificate /etc/letsencrypt/live/your.server.tld/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/your.server.tld/privkey.pem;
location / {
root /var/www/lokinet-bootserv;
}
location /bootstrap.signed {
include /etc/nginx/fastcgi_params;
fastcgi_param SCRIPT_FILENAME /usr/local/bin/lokinet-bootserv;
fastcgi_pass unix://tmp/cgi.sock;
}
}

@ -0,0 +1,4 @@
# set me to where the nodedb is for lokinet
#[nodedb]
#dir=/path/to/nodedb/

@ -0,0 +1,20 @@
SRC = $(sort $(wildcard src/*.cpp))
OBJS = $(SRC:.cpp=.cpp.o)
CGI_EXE = lokinet-bootserv
CXX = clang++
all: build
build: $(CGI_EXE)
%.cpp.o: %.cpp
$(CXX) -g -std=c++17 -c -Wall -Werror -Wpedantic $< -o $@
$(CGI_EXE): $(OBJS)
$(CXX) -o $(CGI_EXE) $^
clean:
rm -f $(CGI_EXE) $(OBJS)

@ -0,0 +1,35 @@
# lokinet-bootserv
cgi executable for serving a random RC for bootstrap from a nodedb
## configuring
copy the example config (privileged)
# cp configs/lokinet-bootserv.ini /usr/local/etc/lokinet-bootserv.ini
edit the config to have proper values;
specifically, make sure the `[nodedb]` section has a `dir` value that points to a static copy of a healthy nodedb
## building
to build:
$ make
## installing (privileged)
install cgi binary:
# cp lokinet-bootserv /usr/local/bin/lokinet-bootserv
set up with nginx cgi:
# cp configs/lokinet-bootserv-nginx.conf /etc/nginx/sites-available/lokinet-bootserv.conf
# ln -s /etc/nginx/sites-available/lokinet-bootserv.conf /etc/nginx/sites-enabled/
## maintenance
add the following to the crontab:
0 0 * * * /usr/local/bin/lokinet-bootserv --cron
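## testing
fetch a bootstrap RC from any client to verify the whole chain (assumes `your.server.tld` from the example nginx config resolves and the nodedb is populated):
$ curl -fsS https://your.server.tld/bootstrap.signed -o bootstrap.signed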

@ -0,0 +1,171 @@
#include "lokinet-cgi.hpp"
#include <fstream>
#include <dirent.h>
#include <list>
#include <sstream>
namespace lokinet
{
namespace bootserv
{
CGIHandler::CGIHandler(std::ostream& o) : Handler(o)
{
}
CGIHandler::~CGIHandler()
{
}
int
CGIHandler::Exec(const Config& conf)
{
const char* e = getenv("REQUEST_METHOD");
if(e == nullptr)
return ReportError("$REQUEST_METHOD not set");
std::string_view method(e);
if(method != "GET")
{
out << "Content-Type: text/plain" << std::endl;
out << "Status: 405 Method Not Allowed" << std::endl << std::endl;
return 0;
}
std::string fname;
if(!conf.VisitSection(
"nodedb", [&](const Config::Section_t& sect) -> bool {
auto itr = sect.find("dir");
if(itr == sect.end())
return false;
fname = PickRandomFileInDir(
std::string(itr->second.data(), itr->second.size()));
return true;
}))
return ReportError("bad values in nodedb section of config");
if(fname.empty())
{
// no files in nodedb
out << "Content-Type: text/plain" << std::endl;
out << "Status: 404 Not Found" << std::endl << std::endl;
return 0;
}
return ServeFile(fname.c_str(), "application/octet-stream");
}
std::string
CGIHandler::PickRandomFileInDir(std::string dirname) const
{
// collect files
std::list< std::string > files;
{
DIR* d = opendir(dirname.c_str());
if(d == nullptr)
{
return "";
};
std::list< std::string > subdirs;
dirent* ent = nullptr;
while((ent = readdir(d)))
{
std::string_view f = ent->d_name;
if(f != "." && f != "..")
{
std::stringstream ss;
ss << dirname;
ss << '/';
ss << f;
subdirs.emplace_back(ss.str());
}
}
closedir(d);
for(const auto& subdir : subdirs)
{
d = opendir(subdir.c_str());
if(d)
{
while((ent = readdir(d)))
{
std::string_view f;
f = ent->d_name;
if(f != "." && f != ".."
&& f.find_last_of(".signed") != std::string_view::npos)
{
std::stringstream ss;
ss << subdir << "/" << f;
files.emplace_back(ss.str());
}
}
closedir(d);
}
}
}
uint32_t randint;
{
// read sizeof(uint32_t) bytes of randomness via a plain byte stream
std::ifstream randf("/dev/urandom", std::ios::binary);
if(!randf.is_open())
return "";
randf.read(reinterpret_cast<char*>(&randint), sizeof(randint));
}
auto itr = files.begin();
if(files.size() > 1)
std::advance(itr, randint % files.size());
return *itr;
}
int
CGIHandler::ServeFile(const char* fname, const char* contentType) const
{
std::ifstream f(fname);
if(f.is_open())
{
f.seekg(0, std::ios::end);
auto sz = f.tellg();
f.seekg(0, std::ios::beg);
if(sz)
{
out << "Content-Type: " << contentType << std::endl;
out << "Status: 200 OK" << std::endl;
out << "Content-Length: " << std::to_string(sz) << std::endl
<< std::endl;
char buf[512] = {0};
size_t r = 0;
while((r = f.readsome(buf, sizeof(buf))) > 0)
out.write(buf, r);
out << std::flush;
}
else
{
out << "Content-Type: text/plain" << std::endl;
out << "Status: 500 Internal Server Error" << std::endl << std::endl;
out << "could not serve '" << fname << "' as it is an empty file"
<< std::endl;
}
}
else
{
out << "Content-Type: text/plain" << std::endl;
out << "Status: 404 Not Found" << std::endl << std::endl;
out << "could not serve '" << fname
<< "' as it does not exist on the filesystem" << std::endl;
}
return 0;
}
int
CGIHandler::ReportError(const char* err)
{
out << "Content-Type: text/plain" << std::endl;
out << "Status: 500 Internal Server Error" << std::endl << std::endl;
out << err << std::endl;
return 0;
}
Handler_ptr
NewCGIHandler(std::ostream& out)
{
return std::make_unique< CGIHandler >(out);
}
} // namespace bootserv
} // namespace lokinet
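Because the handler above only consults REQUEST_METHOD and the ini file, it can be exercised by hand without nginx (a sketch; config path per DefaultPath in lokinet-config.cpp):
$ REQUEST_METHOD=GET /usr/local/bin/lokinet-bootserv | head -c 256
$ REQUEST_METHOD=POST /usr/local/bin/lokinet-bootserv    # should print a 405 status header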

@ -0,0 +1,43 @@
#ifndef LOKINET_BOOTSERV_HANDLER_HPP
#define LOKINET_BOOTSERV_HANDLER_HPP
#include <iostream>
#include "lokinet-config.hpp"
namespace lokinet
{
namespace bootserv
{
struct Handler
{
Handler(std::ostream& o) : out(o){};
virtual ~Handler(){};
/// handle command
/// return exit code
virtual int
Exec(const Config& conf) = 0;
/// report an error to system however that is done
/// return exit code
virtual int
ReportError(const char* err) = 0;
protected:
std::ostream& out;
};
using Handler_ptr = std::unique_ptr< Handler >;
/// create cgi handler
Handler_ptr
NewCGIHandler(std::ostream& out);
/// create cron handler
Handler_ptr
NewCronHandler(std::ostream& out);
} // namespace bootserv
} // namespace lokinet
#endif

@ -0,0 +1,31 @@
#ifndef BOOTSERV_LOKINET_CRON_HPP
#define BOOTSERV_LOKINET_CRON_HPP
#include "handler.hpp"
namespace lokinet
{
namespace bootserv
{
struct CGIHandler final : public Handler
{
CGIHandler(std::ostream& o);
~CGIHandler();
int
Exec(const Config& conf) override;
int
ReportError(const char* err) override;
int
ServeFile(const char* fname, const char* mime) const;
std::string
PickRandomFileInDir(std::string dirname) const;
};
} // namespace bootserv
} // namespace lokinet
#endif

@ -0,0 +1,132 @@
#include "lokinet-config.hpp"
#include <fstream>
#include <list>
#include <iostream>
namespace lokinet
{
namespace bootserv
{
const char* Config::DefaultPath = "/usr/local/etc/lokinet-bootserv.ini";
bool
Config::LoadFile(const char* fname)
{
{
std::ifstream f(fname);
if(!f.is_open())
return false;
f.seekg(0, std::ios::end);
m_Data.resize(f.tellg());
f.seekg(0, std::ios::beg);
if(m_Data.size() == 0)
return false;
f.read(m_Data.data(), m_Data.size());
}
return Parse();
}
void
Config::Clear()
{
m_Config.clear();
m_Data.clear();
}
bool
Config::Parse()
{
std::list< String_t > lines;
{
auto itr = m_Data.begin();
// split into lines
while(itr != m_Data.end())
{
auto beg = itr;
while(itr != m_Data.end() && *itr != '\n' && *itr != '\r')
++itr;
lines.emplace_back(std::addressof(*beg), (itr - beg));
if(itr == m_Data.end())
break;
++itr;
}
}
String_t sectName;
for(const auto& line : lines)
{
String_t realLine;
auto comment = line.find_first_of(';');
if(comment == String_t::npos)
comment = line.find_first_of('#');
if(comment == String_t::npos)
realLine = line;
else
realLine = line.substr(0, comment);
// blank or commented line?
if(realLine.size() == 0)
continue;
// find delimiters
auto sectOpenPos = realLine.find_first_of('[');
auto sectClosPos = realLine.find_first_of(']');
auto kvDelim = realLine.find_first_of('=');
if(sectOpenPos != String_t::npos && sectClosPos != String_t::npos
&& kvDelim == String_t::npos)
{
// section header
// clamp whitespaces
++sectOpenPos;
while(std::isspace(realLine[sectOpenPos])
&& sectOpenPos != sectClosPos)
++sectOpenPos;
--sectClosPos;
while(std::isspace(realLine[sectClosPos])
&& sectClosPos != sectOpenPos)
--sectClosPos;
// set section name
sectName = realLine.substr(sectOpenPos, sectClosPos);
}
else if(kvDelim != String_t::npos)
{
// key value pair
String_t::size_type k_start = 0;
String_t::size_type k_end = kvDelim;
String_t::size_type v_start = kvDelim + 1;
String_t::size_type v_end = realLine.size() - 1;
// clamp whitespaces
while(std::isspace(realLine[k_start]) && k_start != kvDelim)
++k_start;
while(std::isspace(realLine[k_end]) && k_end != k_start)
--k_end;
while(std::isspace(realLine[v_start]) && v_start != v_end)
++v_start;
while(std::isspace(realLine[v_end]))
--v_end;
// sect.k = v
String_t k = realLine.substr(k_start, k_end);
String_t v = realLine.substr(v_start, v_end);
Section_t& sect = m_Config[sectName];
sect[k] = v;
}
else // malformed?
return false;
}
return true;
}
bool
Config::VisitSection(
const char* name,
std::function< bool(const Section_t& sect) > visit) const
{
auto itr = m_Config.find(name);
if(itr == m_Config.end())
return false;
return visit(itr->second);
}
} // namespace bootserv
} // namespace lokinet

@ -0,0 +1,47 @@
#ifndef LOKINET_BOOTSERV_CONFIG_HPP
#define LOKINET_BOOTSERV_CONFIG_HPP
#include <unordered_map>
#include <string_view>
#include <functional>
#include <memory>
#include <vector>
namespace lokinet
{
namespace bootserv
{
struct Config
{
using String_t = std::string_view;
using Section_t = std::unordered_map< String_t, String_t >;
using Config_impl_t = std::unordered_map< String_t, Section_t >;
static const char* DefaultPath;
/// clear config
void
Clear();
/// load config file for bootserv
/// return true on success
/// return false on error
bool
LoadFile(const char* fname);
/// visit a section in config read only by name
/// return false if no section or value propagated from visitor
bool
VisitSection(const char* name,
std::function< bool(const Section_t&) > visit) const;
private:
bool
Parse();
std::vector< char > m_Data;
Config_impl_t m_Config;
};
} // namespace bootserv
} // namespace lokinet
#endif

@ -0,0 +1,37 @@
#include "lokinet-cron.hpp"
namespace lokinet
{
namespace bootserv
{
CronHandler::CronHandler(std::ostream& o) : Handler(o)
{
}
CronHandler::~CronHandler()
{
}
int
CronHandler::Exec(const Config& conf)
{
// this runs the cron tasks
// TODO: implement me
return 0;
}
int
CronHandler::ReportError(const char* err)
{
out << "error: " << err << std::endl;
return 1;
}
Handler_ptr
NewCronHandler(std::ostream& out)
{
return std::make_unique< CronHandler >(out);
}
} // namespace bootserv
} // namespace lokinet

@ -0,0 +1,25 @@
#ifndef BOOTSERV_LOKINET_CRON_HPP
#define BOOTSERV_LOKINET_CRON_HPP
#include "handler.hpp"
namespace lokinet
{
namespace bootserv
{
struct CronHandler final : public Handler
{
CronHandler(std::ostream& o);
~CronHandler();
int
Exec(const Config& conf) override;
int
ReportError(const char* err) override;
};
} // namespace bootserv
} // namespace lokinet
#endif

@ -0,0 +1,60 @@
#include "handler.hpp"
#include "lokinet-config.hpp"
#include <getopt.h>
#include <string_view>
#include <sstream>
static int
printhelp(const char* exe)
{
std::cout << "usage: " << exe << " [--cron] [--conf /path/to/alt/config.ini]"
<< std::endl;
return 1;
}
int
main(int argc, char* argv[])
{
bool RunCron = false;
const char* confFile = lokinet::bootserv::Config::DefaultPath;
lokinet::bootserv::Config config;
lokinet::bootserv::Handler_ptr handler;
option longopts[] = {{"cron", no_argument, 0, 'C'},
{"help", no_argument, 0, 'h'},
{"conf", required_argument, 0, 'c'},
{0, 0, 0, 0}};
int c = 0;
int index = 0;
while((c = getopt_long(argc, argv, "hCc:", longopts, &index)) != -1)
{
switch(c)
{
case 'h':
return printhelp(argv[0]);
case 'C':
RunCron = true;
break;
case 'c':
confFile = optarg;
break;
}
}
if(RunCron)
handler = lokinet::bootserv::NewCronHandler(std::cout);
else
handler = lokinet::bootserv::NewCGIHandler(std::cout);
if(!config.LoadFile(confFile))
{
std::stringstream ss;
ss << "failed to load " << confFile;
return handler->ReportError(ss.str().c_str());
}
else
return handler->Exec(config);
}

Binary file not shown.


@ -0,0 +1,8 @@
FROM debian:testing
RUN /bin/bash -c 'echo "man-db man-db/auto-update boolean false" | debconf-set-selections'
RUN /bin/bash -c 'sed -i "s/main/main contrib/g" /etc/apt/sources.list'
RUN /bin/bash -c 'apt-get -o=Dpkg::Use-Pty=0 -q update && apt-get -o=Dpkg::Use-Pty=0 -q dist-upgrade -y && apt-get -o=Dpkg::Use-Pty=0 -q install -y android-sdk google-android-ndk-installer'
RUN /bin/bash -c 'apt-get -o=Dpkg::Use-Pty=0 -q -y install wget git pkg-config'
RUN /bin/bash -c 'wget https://services.gradle.org/distributions/gradle-6.3-bin.zip -O /tmp/gradle.zip && unzip -d /opt/ /tmp/gradle.zip && ln -s /opt/gradle*/bin/gradle /usr/local/bin/gradle && rm -f /tmp/gradle.zip'
RUN /bin/bash -c 'git clone https://github.com/Shadowstyler/android-sdk-licenses.git /tmp/android-sdk-licenses && cp -a /tmp/android-sdk-licenses/*-license /usr/lib/android-sdk/licenses && rm -rf /tmp/android-sdk-licenses'
RUN /bin/bash -c 'apt-get -o=Dpkg::Use-Pty=0 -q -y install automake libtool'

@ -0,0 +1,3 @@
FROM debian:sid
RUN /bin/bash -c 'echo "man-db man-db/auto-update boolean false" | debconf-set-selections'
RUN /bin/bash -c 'apt-get -o=Dpkg::Use-Pty=0 -q update && apt-get -o=Dpkg::Use-Pty=0 -q dist-upgrade -y && apt-get -o=Dpkg::Use-Pty=0 -q install -y eatmydata gdb cmake git ninja-build pkg-config ccache clang-11 libsodium-dev libsystemd-dev python3-dev libuv1-dev libunbound-dev nettle-dev libssl-dev libevent-dev libsqlite3-dev libboost-thread-dev libboost-serialization-dev libboost-program-options-dev libgtest-dev libminiupnpc-dev libunwind8-dev libreadline-dev libhidapi-dev libusb-1.0.0-dev qttools5-dev libcurl4-openssl-dev'

@ -0,0 +1,3 @@
FROM debian:stable
RUN /bin/bash -c 'echo "man-db man-db/auto-update boolean false" | debconf-set-selections'
RUN /bin/bash -c 'apt-get -o=Dpkg::Use-Pty=0 -q update && apt-get -o=Dpkg::Use-Pty=0 -q dist-upgrade -y && apt-get -o=Dpkg::Use-Pty=0 -q install -y eatmydata gdb cmake git ninja-build pkg-config ccache g++ libsodium-dev libsystemd-dev python3-dev libuv1-dev libunbound-dev nettle-dev libssl-dev libevent-dev libsqlite3-dev libboost-thread-dev libboost-serialization-dev libboost-program-options-dev libgtest-dev libminiupnpc-dev libunwind8-dev libreadline-dev libhidapi-dev libusb-1.0.0-dev qttools5-dev libcurl4-openssl-dev'

@ -0,0 +1,4 @@
FROM debian:testing
RUN /bin/bash -c 'echo "man-db man-db/auto-update boolean false" | debconf-set-selections'
RUN /bin/bash -c 'apt-get -o=Dpkg::Use-Pty=0 -q update && apt-get -o=Dpkg::Use-Pty=0 -q dist-upgrade -y && apt-get -o=Dpkg::Use-Pty=0 -q install -y eatmydata build-essential cmake git ninja-build pkg-config ccache g++-mingw-w64-x86-64-posix nsis zip automake libtool autoconf make qttools5-dev file gperf patch openssh-client'
RUN /bin/bash -c 'update-alternatives --set x86_64-w64-mingw32-gcc /usr/bin/x86_64-w64-mingw32-gcc-posix && update-alternatives --set x86_64-w64-mingw32-g++ /usr/bin/x86_64-w64-mingw32-g++-posix'

@ -0,0 +1,3 @@
FROM debian:sid
RUN /bin/bash -c 'echo "man-db man-db/auto-update boolean false" | debconf-set-selections'
RUN /bin/bash -c 'apt-get -o=Dpkg::Use-Pty=0 -q update && apt-get -o=Dpkg::Use-Pty=0 -q install -y eatmydata git clang-format-11'

@ -1,9 +1,8 @@
## drone-ci docker jizz
To rebuild all ci images and push them to the oxen registry server do:
To rebuild all ci images and push them to a registry server do:
$ docker login registry.oxen.rocks
$ ./rebuild-docker-images.py
$ docker login your.registry.here
$ ./rebuild-docker-images.sh your.registry.here *.dockerfile
If you aren't part of the Oxen team, you'll likely need to set up your own registry and change
registry.oxen.rocks to your own domain name in order to do anything useful with this.
The docker images will be `your.registry.here/lokinet-ci-*` for each *.dockerfile in this directory.
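For example, to rebuild and push a single image with the shell script below (a sketch; assumes a debian-stable.dockerfile exists in this directory):
$ docker login your.registry.here
$ ./rebuild-docker-images.sh your.registry.here debian-stable.dockerfile   # pushes your.registry.here/lokinet-ci-debian-stable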

@ -1,350 +0,0 @@
#!/usr/bin/env python3
import subprocess
import tempfile
import optparse
import sys
from concurrent.futures import ThreadPoolExecutor
import threading
parser = optparse.OptionParser()
parser.add_option("--no-cache", action="store_true",
help="Run `docker build` with the `--no-cache` option to ignore existing images")
parser.add_option("--parallel", "-j", type="int", default=1,
help="Run up to this many builds in parallel")
parser.add_option("--distro", type="string", default="",
help="Build only this distro; should be DISTRO-CODE or DISTRO-CODE/ARCH, "
"e.g. debian-sid/amd64")
(options, args) = parser.parse_args()
registry_base = 'registry.oxen.rocks/lokinet-ci-'
distros = [*(('debian', x) for x in ('sid', 'stable', 'testing', 'bullseye', 'buster')),
*(('ubuntu', x) for x in ('rolling', 'lts', 'impish', 'hirsute', 'focal', 'bionic'))]
if options.distro:
d = options.distro.split('-')
if len(d) != 2 or d[0] not in ('debian', 'ubuntu') or not d[1]:
print("Bad --distro value '{}'".format(options.distro), file=sys.stderr)
sys.exit(1)
distros = [(d[0], d[1].split('/')[0])]
manifests = {} # "image:latest": ["image/amd64", "image/arm64v8", ...]
manifestlock = threading.Lock()
def arches(distro):
if options.distro and '/' in options.distro:
arch = options.distro.split('/')[1]
if arch not in ('amd64', 'i386', 'arm64v8', 'arm32v7'):
print("Bad --distro value '{}'".format(options.distro), file=sys.stderr)
sys.exit(1)
return [arch]
a = ['amd64', 'arm64v8', 'arm32v7']
if distro[0] == 'debian' or distro == ('ubuntu', 'bionic'):
a.append('i386') # i386 builds don't work on later ubuntu
return a
hacks = {
registry_base + 'ubuntu-bionic-builder': """g++-8 \
&& mkdir -p /usr/lib/x86_64-linux-gnu/pgm-5.2/include""",
}
failure = False
lineno = 0
linelock = threading.Lock()
def print_line(myline, value):
linelock.acquire()
global lineno
if sys.__stdout__.isatty():
jump = lineno - myline
print("\033[{jump}A\r\033[K{value}\033[{jump}B\r".format(jump=jump, value=value), end='')
sys.stdout.flush()
else:
print(value)
linelock.release()
def run_or_report(*args, myline):
try:
subprocess.run(
args, check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding='utf8')
except subprocess.CalledProcessError as e:
with tempfile.NamedTemporaryFile(suffix=".log", delete=False) as log:
log.write("Error running {}: {}\n\nOutput:\n\n".format(' '.join(args), e).encode())
log.write(e.output.encode())
global failure
failure = True
print_line(myline, "\033[31;1mError! See {} for details".format(log.name))
raise e
def build_tag(tag_base, arch, contents):
if failure:
raise ChildProcessError()
linelock.acquire()
global lineno
myline = lineno
lineno += 1
print()
linelock.release()
with tempfile.NamedTemporaryFile() as dockerfile:
dockerfile.write(contents.encode())
dockerfile.flush()
tag = '{}/{}'.format(tag_base, arch)
print_line(myline, "\033[33;1mRebuilding \033[35;1m{}\033[0m".format(tag))
run_or_report('docker', 'build', '--pull', '-f', dockerfile.name, '-t', tag,
*(('--no-cache',) if options.no_cache else ()), '.', myline=myline)
print_line(myline, "\033[33;1mPushing \033[35;1m{}\033[0m".format(tag))
run_or_report('docker', 'push', tag, myline=myline)
print_line(myline, "\033[32;1mFinished build \033[35;1m{}\033[0m".format(tag))
latest = tag_base + ':latest'
global manifests
manifestlock.acquire()
if latest in manifests:
manifests[latest].append(tag)
else:
manifests[latest] = [tag]
manifestlock.release()
def base_distro_build(distro, arch):
tag = '{r}{distro[0]}-{distro[1]}-base'.format(r=registry_base, distro=distro)
codename = 'latest' if distro == ('ubuntu', 'lts') else distro[1]
build_tag(tag, arch, """
FROM {}/{}:{}
RUN /bin/bash -c 'echo "man-db man-db/auto-update boolean false" | debconf-set-selections'
RUN apt-get -o=Dpkg::Use-Pty=0 -q update \
&& apt-get -o=Dpkg::Use-Pty=0 -q dist-upgrade -y \
&& apt-get -o=Dpkg::Use-Pty=0 -q autoremove -y \
{hacks}
""".format(arch, distro[0], codename, hacks=hacks.get(tag, '')))
def distro_build(distro, arch):
prefix = '{r}{distro[0]}-{distro[1]}'.format(r=registry_base, distro=distro)
fmtargs = dict(arch=arch, distro=distro, prefix=prefix)
# (distro)-(codename)-base: Base image from upstream: we sync the repos, but do nothing else.
if (distro, arch) != (('debian', 'stable'), 'amd64'): # debian-stable-base/amd64 already built
base_distro_build(distro, arch)
# (distro)-(codename)-builder: Deb builder image used for building debs; we add the basic tools
# we use to build debs, not including things that should come from the dependencies in the
# debian/control file.
build_tag(prefix + '-builder', arch, """
FROM {prefix}-base/{arch}
RUN apt-get -o=Dpkg::Use-Pty=0 -q update \
&& apt-get -o=Dpkg::Use-Pty=0 -q dist-upgrade -y \
&& apt-get -o=Dpkg::Use-Pty=0 --no-install-recommends -q install -y \
ccache \
devscripts \
equivs \
g++ \
git \
git-buildpackage \
openssh-client \
{hacks}
""".format(**fmtargs, hacks=hacks.get(prefix + '-builder', '')))
# (distro)-(codename): Basic image we use for most builds. This takes the -builder and adds
# most dependencies found in our packages.
build_tag(prefix, arch, """
FROM {prefix}-builder/{arch}
RUN apt-get -o=Dpkg::Use-Pty=0 -q update \
&& apt-get -o=Dpkg::Use-Pty=0 -q dist-upgrade -y \
&& apt-get -o=Dpkg::Use-Pty=0 --no-install-recommends -q install -y \
automake \
ccache \
cmake \
eatmydata \
g++ \
gdb \
git \
libboost-program-options-dev \
libboost-serialization-dev \
libboost-thread-dev \
libcurl4-openssl-dev \
libevent-dev \
libgtest-dev \
libhidapi-dev \
libjemalloc-dev \
libminiupnpc-dev \
libreadline-dev \
libsodium-dev \
libsqlite3-dev \
libssl-dev \
libsystemd-dev \
libtool \
libunbound-dev \
libunwind8-dev \
libusb-1.0.0-dev \
libuv1-dev \
libzmq3-dev \
lsb-release \
make \
nettle-dev \
ninja-build \
openssh-client \
patch \
pkg-config \
pybind11-dev \
python3-dev \
python3-pip \
python3-pybind11 \
python3-pytest \
python3-setuptools \
qttools5-dev \
{hacks}
""".format(**fmtargs, hacks=hacks.get(prefix, '')))
# Android and flutter builds on top of debian-stable-base and adds a ton of android crap; we
# schedule this job as soon as the debian-sid-base/amd64 build finishes, because they easily take
# the longest and are by far the biggest images.
def android_builds():
build_tag(registry_base + 'android', 'amd64', """
FROM {r}debian-stable-base
RUN /bin/bash -c 'sed -i "s/main/main contrib/g" /etc/apt/sources.list'
RUN apt-get -o=Dpkg::Use-Pty=0 -q update \
&& apt-get -o=Dpkg::Use-Pty=0 -q dist-upgrade -y \
&& apt-get -o=Dpkg::Use-Pty=0 -q install --no-install-recommends -y \
android-sdk \
automake \
ccache \
cmake \
curl \
git \
google-android-ndk-installer \
libtool \
make \
openssh-client \
patch \
pkg-config \
wget \
xz-utils \
zip \
&& git clone https://github.com/Shadowstyler/android-sdk-licenses.git /tmp/android-sdk-licenses \
&& cp -a /tmp/android-sdk-licenses/*-license /usr/lib/android-sdk/licenses \
&& rm -rf /tmp/android-sdk-licenses
""".format(r=registry_base))
build_tag(registry_base + 'flutter', 'amd64', """
FROM {r}android
RUN cd /opt \
&& curl https://storage.googleapis.com/flutter_infra_release/releases/stable/linux/flutter_linux_2.2.2-stable.tar.xz \
| tar xJv \
&& ln -s /opt/flutter/bin/flutter /usr/local/bin/ \
&& flutter precache
""".format(r=registry_base))
# lint is a tiny build (on top of debian-stable-base) with just formatting checking tools
def lint_build():
build_tag(registry_base + 'lint', 'amd64', """
FROM {r}debian-stable-base
RUN apt-get -o=Dpkg::Use-Pty=0 -q install --no-install-recommends -y \
clang-format-11 \
eatmydata \
git \
jsonnet
""".format(r=registry_base))
def nodejs_build():
build_tag(registry_base + 'nodejs', 'amd64', """
FROM node:14.16.1
RUN /bin/bash -c 'echo "man-db man-db/auto-update boolean false" | debconf-set-selections'
RUN apt-get -o=Dpkg::Use-Pty=0 -q update \
&& apt-get -o=Dpkg::Use-Pty=0 -q dist-upgrade -y \
&& apt-get -o=Dpkg::Use-Pty=0 -q install --no-install-recommends -y \
ccache \
cmake \
eatmydata \
g++ \
gdb \
git \
make \
ninja-build \
openssh-client \
patch \
pkg-config \
wine
""")
# Start debian-stable-base/amd64 on its own, because other builds depend on it and we want to get
# those (especially android/flutter) fired off as soon as possible (because it's slow and huge).
if ('debian', 'stable') in distros:
base_distro_build(['debian', 'stable'], 'amd64')
executor = ThreadPoolExecutor(max_workers=max(options.parallel, 1))
if options.distro:
jobs = []
else:
jobs = [executor.submit(b) for b in (android_builds, lint_build, nodejs_build)]
for d in distros:
for a in arches(d):
jobs.append(executor.submit(distro_build, d, a))
while len(jobs):
j = jobs.pop(0)
try:
j.result()
except (ChildProcessError, subprocess.CalledProcessError):
for k in jobs:
k.cancel()
if failure:
print("Error(s) occured, aborting!", file=sys.stderr)
sys.exit(1)
print("\n\n\033[32;1mAll builds finished successfully; pushing manifests...\033[0m\n")
def push_manifest(latest, tags):
if failure:
raise ChildProcessError()
linelock.acquire()
global lineno
myline = lineno
lineno += 1
print()
linelock.release()
subprocess.run(['docker', 'manifest', 'rm', latest], stderr=subprocess.DEVNULL, check=False)
print_line(myline, "\033[33;1mCreating manifest \033[35;1m{}\033[0m".format(latest))
run_or_report('docker', 'manifest', 'create', latest, *tags, myline=myline)
print_line(myline, "\033[33;1mPushing manifest \033[35;1m{}\033[0m".format(latest))
run_or_report('docker', 'manifest', 'push', latest, myline=myline)
print_line(myline, "\033[32;1mFinished manifest \033[35;1m{}\033[0m".format(latest))
for latest, tags in manifests.items():
jobs.append(executor.submit(push_manifest, latest, tags))
while len(jobs):
j = jobs.pop(0)
try:
j.result()
except (ChildProcessError, subprocess.CalledProcessError):
for k in jobs:
k.cancel()
print("\n\n\033[32;1mAll done!\n")

@ -0,0 +1,13 @@
#!/bin/bash
# the registry server to use
registry=$1
test "x$registry" != "x" || exit 1
for file in ${@:2} ; do
name="$(echo $file | cut -d'.' -f1)"
echo "rebuild $name"
docker build -f $file -t $registry/lokinet-ci-$name --pull --no-cache --quiet .
docker push $registry/lokinet-ci-$name
done

@ -0,0 +1,3 @@
FROM ubuntu:bionic
RUN /bin/bash -c 'echo "man-db man-db/auto-update boolean false" | debconf-set-selections'
RUN /bin/bash -c 'apt-get -o=Dpkg::Use-Pty=0 -q update && apt-get -o=Dpkg::Use-Pty=0 -q dist-upgrade -y && apt-get -o=Dpkg::Use-Pty=0 -q --no-install-recommends install -y eatmydata gdb cmake git ninja-build pkg-config ccache g++-8 python3-dev automake libtool autoconf make qttools5-dev file gperf patch openssh-client'

@ -0,0 +1,3 @@
FROM ubuntu:focal
RUN /bin/bash -c 'echo "man-db man-db/auto-update boolean false" | debconf-set-selections'
RUN /bin/bash -c 'apt-get -o=Dpkg::Use-Pty=0 -q update && apt-get -o=Dpkg::Use-Pty=0 -q dist-upgrade -y && apt-get -o=Dpkg::Use-Pty=0 --no-install-recommends -q install -y eatmydata gdb cmake git ninja-build pkg-config ccache g++ libsodium-dev libsystemd-dev python3-dev libuv1-dev libunbound-dev nettle-dev libssl-dev libevent-dev libsqlite3-dev libboost-thread-dev libboost-serialization-dev libboost-program-options-dev libgtest-dev libminiupnpc-dev libunwind8-dev libreadline-dev libhidapi-dev libusb-1.0.0-dev qttools5-dev libcurl4-openssl-dev'

@ -7,8 +7,7 @@ set -o errexit
bad=
if [ "$DRONE_STAGE_OS" == "darwin" ]; then
if otool -L llarp/apple/org.lokinet.network-extension.systemextension/Contents/MacOS/org.lokinet.network-extension | \
grep -Ev '^llarp/apple:|^\t(/usr/lib/lib(System\.|c\+\+|objc))|/System/Library/Frameworks/(CoreFoundation|NetworkExtension|Foundation|Network)\.framework'; then
if otool -L daemon/lokinet | grep -Ev '^daemon/lokinet:|^\t(/usr/lib/libSystem\.|/usr/lib/libc\+\+\.|/System/Library/Frameworks/CoreFoundation)'; then
bad=1
fi
elif [ "$DRONE_STAGE_OS" == "linux" ]; then

@ -1,9 +1,5 @@
#!/usr/bin/env bash
test "x$IGNORE" != "x" && exit 0
. $(dirname $0)/../format-version.sh
repo=$(readlink -e $(dirname $0)/../../)
$CLANG_FORMAT -i $(find $repo/jni $repo/daemon $repo/llarp $repo/include $repo/pybind | grep -E '\.[hc](pp)?$')
jsonnetfmt -i $repo/.drone.jsonnet
clang-format-11 -i $(find $repo/jni $repo/daemon $repo/llarp $repo/include $repo/pybind | grep -E '\.[hc](pp)?$')
git --no-pager diff --exit-code --color || (echo -ne '\n\n\e[31;1mLint check failed; please run ./contrib/format.sh\e[0m\n\n' ; exit 1)

@ -19,15 +19,9 @@ set -o xtrace # Don't start tracing until *after* we write the ssh key
chmod 600 ssh_key
os="$UPLOAD_OS"
if [ -z "$os" ]; then
if [ "$DRONE_STAGE_OS" == "darwin" ]; then
os="macos-$DRONE_STAGE_ARCH"
elif [ -n "$WINDOWS_BUILD_NAME" ]; then
os="windows-$WINDOWS_BUILD_NAME"
else
os="$DRONE_STAGE_OS-$DRONE_STAGE_ARCH"
fi
os="${UPLOAD_OS:-$DRONE_STAGE_OS-$DRONE_STAGE_ARCH}"
if [ -n "$WINDOWS_BUILD_NAME" ]; then
os="windows-$WINDOWS_BUILD_NAME"
fi
if [ -n "$DRONE_TAG" ]; then
@ -40,29 +34,18 @@ else
fi
mkdir -v "$base"
if [ -e build/win32 ]; then
# save debug symbols
cp -av build/win32/daemon/debug-symbols.tar.xz "$base-debug-symbols.tar.xz"
# save installer
cp -av build/win32/*.exe "$base"
if [ -e daemon/lokinet.exe ]; then
cp -av lokinet-*.exe "$base"
# zipit up yo
archive="$base.zip"
zip -r "$archive" "$base"
elif [ -e lokinet.apk ] ; then
elif [ -e build/outputs/apk/debug/lokinet-debug.apk ] ; then
# android af ngl
archive="$base.apk"
cp -av lokinet.apk "$archive"
elif [ -e build-docs ]; then
cp -av build/outputs/apk/debug/lokinet-debug.apk "$base"
archive="$base.tar.xz"
cp -av build-docs/docs/mkdocs.yml build-docs/docs/markdown "$base"
tar cJvf "$archive" "$base"
elif [ -e build-mac ]; then
archive="$base.tar.xz"
mv build-mac/Lokinet*/ "$base"
tar cJvf "$archive" "$base"
else
cp -av build/daemon/lokinet{,-vpn} "$base"
cp -av contrib/bootstrap/mainnet.signed "$base/bootstrap.signed"
cp -av daemon/lokinet daemon/lokinet-vpn ../lokinet-bootstrap "$base"
# tar dat shiz up yo
archive="$base.tar.xz"
tar cJvf "$archive" "$base"
@ -74,7 +57,6 @@ upload_to="oxen.rocks/${DRONE_REPO// /_}/${DRONE_BRANCH// /_}"
# -mkdir a/, -mkdir a/b/, -mkdir a/b/c/, ... commands. The leading `-` allows the command to fail
# without error.
upload_dirs=(${upload_to//\// })
put_debug=
mkdirs=
dir_tmp=""
for p in "${upload_dirs[@]}"; do
@ -82,15 +64,13 @@ for p in "${upload_dirs[@]}"; do
mkdirs="$mkdirs
-mkdir $dir_tmp"
done
if [ -e "$base-debug-symbols.tar.xz" ] ; then
put_debug="put $base-debug-symbols.tar.xz $upload_to"
fi
sftp -i ssh_key -b - -o StrictHostKeyChecking=off drone@oxen.rocks <<SFTP
$mkdirs
put $archive $upload_to
$put_debug
SFTP
set +o xtrace
echo -e "\n\n\n\n\e[32;1mUploaded to https://${upload_to}/${archive}\e[0m\n\n\n"

@ -1,2 +0,0 @@
[logging]
level=debug

@ -1,5 +0,0 @@
#
# "suggested" default exit node config
#
[network]
exit-node=exit.loki

@ -1,5 +0,0 @@
#
# persist .loki address in a private key file in the data dir
#
[network]
keyfile=lokinet-addr.privkey

@ -1,65 +0,0 @@
#!/bin/bash
#
# helper script for me for when i cross compile
# t. jeff
#
set -e
die() {
echo $@
exit 1
}
platform=${PLATFORM:-Linux}
root="$(readlink -e $(dirname $0)/../)"
cd $root
mkdir -p build-cross
targets=()
cmake_extra=()
while [ "$#" -gt 0 ]; do
if [ "$1" = "--" ]; then
shift
cmake_extra=("$@")
break
fi
targets+=("$1")
shift
done
test ${#targets[@]} = 0 && die no targets provided
archs="${targets[@]}"
echo "all: $archs" > build-cross/Makefile
for arch in $archs ; do
mkdir -p $root/build-cross/build-$arch
cd $root/build-cross/build-$arch
cmake \
-G 'Unix Makefiles' \
-DCROSS_PLATFORM=$platform \
-DCROSS_PREFIX=$arch \
-DCMAKE_EXE_LINKER_FLAGS=-fstack-protector \
-DCMAKE_CXX_FLAGS=-fdiagnostics-color=always \
-DCMAKE_TOOLCHAIN_FILE=$root/contrib/cross/cross.toolchain.cmake \
-DBUILD_STATIC_DEPS=ON \
-DSTATIC_LINK=ON \
-DBUILD_SHARED_LIBS=OFF \
-DBUILD_TESTING=OFF \
-DBUILD_LIBLOKINET=OFF \
-DWITH_TESTS=OFF \
-DNATIVE_BUILD=OFF \
-DSTATIC_LINK=ON \
-DWITH_SYSTEMD=OFF \
-DFORCE_OXENMQ_SUBMODULE=ON \
-DSUBMODULE_CHECK=OFF \
-DWITH_LTO=OFF \
-DWITH_BOOTSTRAP=OFF \
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
"${cmake_extra[@]}" \
$root
cd $root/build-cross
echo -ne "$arch:\n\t\$(MAKE) -C build-$arch\n" >> $root/build-cross/Makefile
done
cd $root
make -j${JOBS:-$(nproc)} -C build-cross
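For example, cross compiling for two of the toolchain prefixes whose cmake files appear nearby, forwarding an extra cmake flag after `--` (a sketch; the helper's path under contrib/ is an assumption):
$ PLATFORM=Linux ./contrib/cross.sh aarch64-linux-gnu arm-linux-gnueabihf -- -DCMAKE_BUILD_TYPE=Release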

@ -1,5 +1,7 @@
set(CMAKE_SYSTEM_NAME ${CROSS_PLATFORM})
set(TOOLCHAIN_PREFIX ${CROSS_PREFIX})
set(CMAKE_SYSTEM_NAME Linux)
set(TOOLCHAIN_PREFIX aarch64-linux-gnu)
#set(TOOLCHAIN_SUFFIX)
set(CMAKE_FIND_ROOT_PATH /usr/${TOOLCHAIN_PREFIX})
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
@ -8,4 +10,3 @@ set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}-gcc${TOOLCHAIN_SUFFIX})
set(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}-g++${TOOLCHAIN_SUFFIX})
set(ARCH_TRIPLET ${TOOLCHAIN_PREFIX})

@ -0,0 +1,12 @@
set(CMAKE_SYSTEM_NAME Linux)
set(TOOLCHAIN_PREFIX arm-linux-gnueabihf)
set(TOOLCHAIN_SUFFIX -8)
set(CMAKE_FIND_ROOT_PATH /usr/${TOOLCHAIN_PREFIX})
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}-gcc${TOOLCHAIN_SUFFIX})
set(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}-g++${TOOLCHAIN_SUFFIX})

@ -1,8 +1,4 @@
set(CMAKE_SYSTEM_VERSION 6.0)
# the minimum windows version, set to 6 rn because supporting older windows is hell
set(_winver 0x0600)
add_definitions(-D_WIN32_WINNT=${_winver})
set(CMAKE_SYSTEM_VERSION 5.0)
# target environment on the build host system
# second one is for non-root installs

@ -0,0 +1,12 @@
set(CMAKE_SYSTEM_NAME Linux)
set(TOOLCHAIN_PREFIX powerpc64le-linux-gnu)
set(TOOLCHAIN_SUFFIX -8)
set(CMAKE_FIND_ROOT_PATH /usr/${TOOLCHAIN_PREFIX})
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}-gcc${TOOLCHAIN_SUFFIX})
set(CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}-g++${TOOLCHAIN_SUFFIX})

@ -0,0 +1,60 @@
<?xml version="1.0"?>
<!DOCTYPE service_bundle SYSTEM "/usr/share/lib/xml/dtd/service_bundle.dtd.1">
<!--
Created by Manifold
--><service_bundle type="manifest" name="lokinet">
<service name="site/lokinet" type="service" version="1">
<dependency name="network" grouping="require_all" restart_on="error" type="service">
<service_fmri value="svc:/milestone/network:default"/>
</dependency>
<dependency name="filesystem" grouping="require_all" restart_on="error" type="service">
<service_fmri value="svc:/system/filesystem/local"/>
</dependency>
<instance name="default" enabled="false">
<method_context>
<method_credential user="lokinet" group="lokinet"/>
</method_context>
<exec_method type="method" name="start" exec="/usr/bin/lokinet %{config_file}" timeout_seconds="60"/>
<exec_method type="method" name="stop" exec="/usr/bin/kill -INT &lt;&lt;&lt; `pgrep lokinet`" timeout_seconds="60"/>
<property_group name="startd" type="framework">
<propval name="duration" type="astring" value="child"/>
<propval name="ignore_error" type="astring" value="core,signal"/>
</property_group>
<property_group name="application" type="application">
<propval name="config_file" type="astring" value="/etc/loki/lokinet.ini"/>
</property_group>
</instance>
<stability value="Evolving"/>
<template>
<common_name>
<loctext xml:lang="C">
LokiNET: Anonymous Network layer thingydoo.
</loctext>
</common_name>
</template>
</service>
</service_bundle>
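Importing and enabling the service uses the standard SMF tooling (the manifest filename lokinet.xml is an assumption):
# svccfg import lokinet.xml
# svcadm enable site/lokinet
# svcs -l site/lokinet    # confirm it is online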

@ -0,0 +1,18 @@
#!/usr/sbin/dtrace -s
syscall:::entry
/pid == $target/
{
@calls[ustack(10), probefunc] = count();
}
profile:::tick-1sec
{
/** print */
printa(@calls);
/** clear */
clear(@calls);
trunc(@calls, 15);
}
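The probes filter on $target, so attach the script to a running daemon by pid (a sketch; the filename lokinet-syscalls.d is an assumption):
# dtrace -s lokinet-syscalls.d -p "$(pgrep lokinet)"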

@ -1,19 +0,0 @@
CLANG_FORMAT_DESIRED_VERSION=15
CLANG_FORMAT=$(command -v clang-format-$CLANG_FORMAT_DESIRED_VERSION 2>/dev/null)
if [ $? -ne 0 ]; then
CLANG_FORMAT=$(command -v clang-format-mp-$CLANG_FORMAT_DESIRED_VERSION 2>/dev/null)
fi
if [ $? -ne 0 ]; then
CLANG_FORMAT=$(command -v clang-format 2>/dev/null)
if [ $? -ne 0 ]; then
echo "Please install clang-format version $CLANG_FORMAT_DESIRED_VERSION and re-run this script."
exit 1
fi
version=$(clang-format --version)
if [[ ! $version == *"clang-format version $CLANG_FORMAT_DESIRED_VERSION"* ]]; then
echo "Please install clang-format version $CLANG_FORMAT_DESIRED_VERSION and re-run this script."
exit 1
fi
fi

@ -1,38 +1,24 @@
#!/usr/bin/env bash
. $(dirname $0)/format-version.sh
CLANG_FORMAT_DESIRED_VERSION=11
cd "$(dirname $0)/../"
if [ "$1" = "verify" ] ; then
if [ $($CLANG_FORMAT --output-replacements-xml $(find jni daemon llarp include pybind | grep -E '\.([hc](pp)?|m(m)?)$' | grep -v '#') | grep '</replacement>' | wc -l) -ne 0 ] ; then
exit 2
binary=$(which clang-format-$CLANG_FORMAT_DESIRED_VERSION 2>/dev/null)
if [ $? -ne 0 ]; then
binary=$(which clang-format 2>/dev/null)
if [ $? -ne 0 ]; then
echo "Please install clang-format version $CLANG_FORMAT_DESIRED_VERSION and re-run this script."
exit 1
fi
else
$CLANG_FORMAT -i $(find jni daemon llarp include pybind | grep -E '\.([hc](pp)?|m(m)?)$' | grep -v '#') &> /dev/null
fi
swift_format=$(command -v swiftformat 2>/dev/null)
if [ $? -eq 0 ]; then
if [ "$1" = "verify" ] ; then
for f in $(find daemon | grep -E '\.swift$' | grep -v '#') ; do
if [ $($swift_format --quiet --dryrun < "$f" | diff "$f" - | wc -l) -ne 0 ] ; then
exit 3
fi
done
else
$swift_format --quiet $(find daemon | grep -E '\.swift$' | grep -v '#')
version=$(clang-format --version)
if [[ ! $version == *"clang-format version $CLANG_FORMAT_DESIRED_VERSION"* ]]; then
echo "Please install clang-format version $CLANG_FORMAT_DESIRED_VERSION and re-run this script."
exit 1
fi
fi
jsonnet_format=$(command -v jsonnetfmt 2>/dev/null)
if [ $? -eq 0 ]; then
if [ "$1" = "verify" ]; then
if ! $jsonnet_format --test .drone.jsonnet; then
exit 4
fi
else
$jsonnet_format --in-place .drone.jsonnet
fi
cd "$(dirname $0)/../"
if [ "$1" = "verify" ] ; then
exit $($binary --output-replacements-xml $(find jni daemon llarp include pybind | grep -E '\.[hc](pp)?$' | grep -v '\#') | grep '</replacement>' | wc -l)
else
$binary -i $(find jni daemon llarp include pybind | grep -E '\.[hc](pp)?$' | grep -v '\#') &> /dev/null
fi

@ -0,0 +1,20 @@
#!/bin/sh
. /etc/rc.subr
name=lokinet
rcvar=lokinet_enable
command="/usr/local/bin/${name}"
command_args="/usr/local/etc/${name}/daemon.ini > /dev/null 2>&1"
pidfile="/usr/local/etc/${name}/lokinet.pid"
required_files="/usr/local/etc/${name}/daemon.ini"
sig_reload="HUP"
start_precmd="${command} -g /usr/local/etc/${name}/daemon.ini"
load_rc_config $name
run_rc_command "$1"

@ -1,26 +0,0 @@
#!/usr/bin/env python3
#
# .loki secret key generator script
# makes keyfile contents
#
# usage: python3 keygen.py out.private
# python3 keygen.py > /some/where/over/the/rainbow
#
from nacl.bindings import crypto_sign_keypair
import sys
out = sys.stdout.buffer  # write raw bytes when dumping to stdout
close_out = lambda : None
args = sys.argv[1:]
if args and args[0] != '-':
out = open(args[0], 'wb')
close_out = out.close
pk, sk = crypto_sign_keypair()
out.write(b'64:')
out.write(sk)
out.flush()
close_out()

@ -1,10 +0,0 @@
cmake_minimum_required(VERSION 3.10)
project(udptest LANGUAGES CXX)
set(CMAKE_CXX_STANDARD 17)
add_executable(udptest udptest.cpp)
include_directories(../../include)
target_link_libraries(udptest PUBLIC lokinet)

@ -1,13 +0,0 @@
# liblokinet examples
building:
$ mkdir -p build
$ cd build
$ cp /path/to/liblokinet.so .
$ cmake .. -DCMAKE_EXE_LINKER_FLAGS='-L.'
$ make
running:
$ ./udptest /path/to/bootstrap.signed

@ -1,239 +0,0 @@
#include <lokinet.h>
#include <signal.h>
#include <unistd.h>  // usleep
#include <memory>
#include <stdexcept>
#include <iostream>
#include <fstream>
#include <sstream>
#include <string>
#include <vector>
#include <cstring>
#include <algorithm>
bool _run{true};
using Lokinet_ptr = std::shared_ptr<lokinet_context>;
[[nodiscard]] auto
MakeLokinet(const std::vector<char>& bootstrap)
{
auto ctx = std::shared_ptr<lokinet_context>(lokinet_context_new(), lokinet_context_free);
if (auto err = lokinet_add_bootstrap_rc(bootstrap.data(), bootstrap.size(), ctx.get()))
throw std::runtime_error{strerror(err)};
if (lokinet_context_start(ctx.get()))
throw std::runtime_error{"could not start context"};
return ctx;
}
void
WaitForReady(const Lokinet_ptr& ctx)
{
while (_run and lokinet_wait_for_ready(1000, ctx.get()))
{
std::cout << "waiting for context..." << std::endl;
}
}
class Flow
{
lokinet_udp_flowinfo const _info;
lokinet_context* const _ctx;
public:
explicit Flow(const lokinet_udp_flowinfo* info, lokinet_context* ctx) : _info{*info}, _ctx{ctx}
{}
lokinet_context*
Context() const
{
return _ctx;
}
std::string
String() const
{
std::stringstream ss;
ss << std::string{_info.remote_host} << ":" << std::to_string(_info.remote_port)
<< " on socket " << _info.socket_id;
return ss.str();
}
};
struct ConnectJob
{
lokinet_udp_flowinfo remote;
lokinet_context* ctx;
};
void
CreateOutboundFlow(void* user, void** flowdata, int* timeout)
{
auto* job = static_cast<ConnectJob*>(user);
Flow* flow = new Flow{&job->remote, job->ctx};
*flowdata = flow;
*timeout = 30;
std::cout << "made outbound flow: " << flow->String() << std::endl;
;
}
int
ProcessNewInboundFlow(void* user, const lokinet_udp_flowinfo* remote, void** flowdata, int* timeout)
{
auto* ctx = static_cast<lokinet_context*>(user);
Flow* flow = new Flow{remote, ctx};
std::cout << "new udp flow: " << flow->String() << std::endl;
*flowdata = flow;
*timeout = 30;
return 0;
}
void
DeleteFlow(const lokinet_udp_flowinfo* remote, void* flowdata)
{
auto* flow = static_cast<Flow*>(flowdata);
std::cout << "udp flow from " << flow->String() << " timed out" << std::endl;
delete flow;
}
void
HandleUDPPacket(const lokinet_udp_flowinfo* remote, const char* pkt, size_t len, void* flowdata)
{
auto* flow = static_cast<Flow*>(flowdata);
std::cout << "we got " << len << " bytes of udp from " << flow->String() << std::endl;
}
void
BounceUDPPacket(const lokinet_udp_flowinfo* remote, const char* pkt, size_t len, void* flowdata)
{
auto* flow = static_cast<Flow*>(flowdata);
std::cout << "bounce " << len << " bytes of udp from " << flow->String() << std::endl;
if (auto err = lokinet_udp_flow_send(remote, pkt, len, flow->Context()))
{
std::cout << "bounce failed: " << strerror(err) << std::endl;
}
}
Lokinet_ptr sender, recip;
void
signal_handler(int)
{
_run = false;
}
int
main(int argc, char* argv[])
{
if (argc == 1)
{
std::cout << "usage: " << argv[0] << " bootstrap.signed" << std::endl;
return 1;
}
/*
signal(SIGINT, signal_handler);
signal(SIGTERM, signal_handler);
*/
std::vector<char> bootstrap;
// load bootstrap.signed
{
std::ifstream inf{argv[1], std::ifstream::ate | std::ifstream::binary};
size_t len = inf.tellg();
inf.seekg(0);
bootstrap.resize(len);
inf.read(bootstrap.data(), bootstrap.size());
}
if (auto* loglevel = getenv("LOKINET_LOG"))
lokinet_log_level(loglevel);
else
lokinet_log_level("none");
std::cout << "starting up" << std::endl;
recip = MakeLokinet(bootstrap);
WaitForReady(recip);
lokinet_udp_bind_result recipBindResult{};
const auto port = 10000;
if (auto err = lokinet_udp_bind(
port,
ProcessNewInboundFlow,
BounceUDPPacket,
DeleteFlow,
recip.get(),
&recipBindResult,
recip.get()))
{
std::cout << "failed to bind recip udp socket " << strerror(err) << std::endl;
return 0;
}
std::cout << "bound recip udp" << std::endl;
sender = MakeLokinet(bootstrap);
WaitForReady(sender);
std::string recipaddr{lokinet_address(recip.get())};
std::cout << "recip ready at " << recipaddr << std::endl;
lokinet_udp_bind_result senderBindResult{};
if (auto err = lokinet_udp_bind(
port,
ProcessNewInboundFlow,
HandleUDPPacket,
DeleteFlow,
sender.get(),
&senderBindResult,
sender.get()))
{
std::cout << "failed to bind sender udp socket " << strerror(err) << std::endl;
return 0;
}
ConnectJob connect{};
connect.remote.socket_id = senderBindResult.socket_id;
connect.remote.remote_port = port;
std::copy_n(recipaddr.c_str(), recipaddr.size(), connect.remote.remote_host);
connect.ctx = sender.get();
std::cout << "bound sender udp" << std::endl;
do
{
std::cout << "try establish to " << connect.remote.remote_host << std::endl;
if (auto err =
lokinet_udp_establish(CreateOutboundFlow, &connect, &connect.remote, sender.get()))
{
std::cout << "failed to establish to recip: " << strerror(err) << std::endl;
usleep(100000);
}
else
break;
} while (true);
std::cout << "sender established" << std::endl;
const std::string buf{"liblokinet"};
const std::string senderAddr{lokinet_address(sender.get())};
do
{
std::cout << senderAddr << " send to remote: " << buf << std::endl;
if (auto err = lokinet_udp_flow_send(&connect.remote, buf.data(), buf.size(), sender.get()))
{
std::cout << "send failed: " << strerror(err) << std::endl;
}
usleep(100000);
} while (_run);
return 0;
}
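To run the example with verbose logging (LOKINET_LOG is read by main() above; the bootstrap path follows the example README):
$ LOKINET_LOG=debug ./udptest /path/to/bootstrap.signed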

@ -1,45 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- our size/viewbox is positioned such that 0,0 is the center of the image (to simplify scaling and rotation). -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="-512px" y="-512px"
viewBox="-512 -512 1024 1024" style="enable-background:new -512 -512 1024 1024;" xml:space="preserve">
<style type="text/css">
.bg{fill:#FFFFFF;}
</style>
<!--
Draw the background shape in a 2x2 box (from -1 to 1 in each dimension), then scale it up
(but not all the way to 512, because we want some padding around the outside).
-->
<g transform="scale(415)">
<path class="bg" d="
M 0.5 1
H -0.5
C -0.81,1 -1,0.81 -1,0.5
V -0.5
C -1,-0.81 -0.81,-1 -0.5,-1
H 0.5
C 0.81,-1 1,-0.81 1,-0.5
V 0.5
C 1,0.81 0.81,1 0.5,1
z
"/>
</g>
<g id="shape0">
<!--
Start with a simple 3x2 shape, where each unit we draw corresponds to 1 block edge length in the
final diagram, and shift it so that 2.5x2.5 becomes the new origin (around which we will rotate).
Then we rotate and scale it to the desired size.
We can then copy that at 90, 180, 270 degree rotations to complete the logo.
-->
<g transform="rotate(45) scale(85) translate(-2.5, -2.5)">
<polygon points="0,0 2,0 2,1 1,1 1,2 0,2"/>
<rect x="1" y="2" width="1" height="1"/>
</g>
</g>
<use xlink:href="#shape0" transform="rotate(90)"/>
<use xlink:href="#shape0" transform="rotate(180)"/>
<use xlink:href="#shape0" transform="rotate(270)"/>
</svg>
