Compare commits


25 Commits

Author SHA1 Message Date
Tobias Genannt 879c700bb8
Merge pull request #939 from timrabl/fix-931
Actually fix #471 and #931
2023-02-01 10:08:59 +01:00
Tim Rabl 6f70b88972 squash commits that revert AUTH_LDAP_USER_SEARCH
add missing AUTH_LDAP_USER_SEARCH, removed while modifying....

revert AUTH_LDAP_USER_SEARCH variable that was accidentally removed in #931 and change behaviour to requested features in #471

remove duplicate AUTH_LDAP_USER_SEARCH variable now and fix this finally, hopefully
2023-02-01 08:18:34 +01:00
Tobias Genannt 7a9aef3791
Merge pull request #931 from timrabl/fix-471
implement extra LDAP user and group filters as requested in #471
2023-01-30 10:17:13 +01:00
Tim Oliver Rabl 3071c500da implement extra LDAP user and group filters as requested in #471 2023-01-30 09:06:22 +01:00
Tobias Genannt 350747c1cb
Merge pull request #910 from sc68cal/enforcing_shortname
Prepend docker.io to image URLs
2023-01-28 18:22:19 +01:00
Tobias Genannt 250b1fb093
Merge pull request #933 from tobiasge/fix-gh-warnings
Fix Github action warnings
2023-01-28 18:21:59 +01:00
Tobias Genannt eef45c8197 Using new GITHUB_OUTPUT method 2023-01-28 15:49:13 +01:00
Tobias Genannt f549b93b9d
Merge pull request #932 from tobiasge/gh-token
Using GITHUB_TOKEN for API
2023-01-28 15:31:53 +01:00
Tobias Genannt f2b0375d5b Using GITHUB_TOKEN for API 2023-01-28 13:38:28 +01:00
Tobias Genannt 3202fb9446
Merge pull request #929 from christianharendt/create-redis-username
Add redis username parameter
2023-01-28 10:14:00 +01:00
Christian Harendt ff373bd60d
Update configuration.py 2023-01-27 15:05:17 +01:00
Christian Harendt bd07a7a5a2
Add redis username parameter 2023-01-27 15:02:11 +01:00
Sean M. Collins 41d80d66b1 Prepend docker.io to image URLs
This is to make podman happy, since newer versions of podman
have set short-name-mode to enforcing

https://github.com/containers/image/blob/main/docs/containers-registries.conf.5.md
2023-01-10 14:10:15 +01:00
Tobias Genannt 015e131d99
Merge pull request #914 from kmanwar89/patch-1
Fix syntax of docker compose commands
2023-01-05 09:47:28 +01:00
Tobias Genannt ff37e17eeb
Merge pull request #913 from tobiasge/start_period_explanation
Added start_period to docker-compose example
2023-01-04 09:27:20 +01:00
Tobias Genannt 1403f52d04
Merge pull request #912 from tobiasge/localhost_allowed_host
Ensure that '*' or 'localhost' is always in ALLOWED_HOSTS
2023-01-03 19:06:03 +01:00
Tobias Genannt 7e0a8fee82
Improved comment 2023-01-03 16:48:00 +01:00
Tobias Genannt 0c1b69ded0
Update docker-compose.override.yml.example
Co-authored-by: Christian Mäder <cimnine@users.noreply.github.com>
2023-01-03 16:45:09 +01:00
Tobias Genannt 06e0815c70
Merge pull request #911 from netbox-community/renovate/python3-saml-1.x
Update dependency python3-saml to v1.15.0
2023-01-03 15:12:12 +01:00
Kadar Anwar 8f2820626c
Fix syntax of docker compose commands
Docker compose's syntax changed as of Compose v2 (source: https://docs.docker.com/compose/reference/).  Replaced references of "docker-compose" with "docker compose" to align with this change.
2023-01-03 09:06:46 -05:00
Tobias Genannt b6faad36cb Added start_period to docker-compose example 2023-01-03 14:58:41 +01:00
Tobias Genannt 73f479d5db Ensure that '*' or 'localhost' is always in ALLOWED_HOSTS 2023-01-03 14:41:26 +01:00
renovate[bot] 89ad7588f0
Update dependency python3-saml to v1.15.0 2022-12-27 23:54:03 +00:00
Tobias Genannt a4d986011d
Merge pull request #906 from netbox-community/renovate/django-storages-1.x
Update dependency django-storages to v1.13.2
2022-12-23 16:54:27 +01:00
renovate[bot] f2bb1198dd
Update dependency django-storages to v1.13.2 2022-12-23 05:15:28 +00:00
11 changed files with 70 additions and 38 deletions

View File

@@ -55,6 +55,7 @@ jobs:
       fail-fast: false
     env:
       GH_ACTION: enable
+      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       IMAGE_NAMES: docker.io/netboxcommunity/netbox
     runs-on: ubuntu-latest
     name: Builds new NetBox Docker Images

View File

@@ -25,6 +25,7 @@ jobs:
     name: Builds new NetBox Docker Images
     env:
       GH_ACTION: enable
+      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       IMAGE_NAMES: docker.io/netboxcommunity/netbox quay.io/netboxcommunity/netbox ghcr.io/netbox-community/netbox
     steps:
       - id: source-checkout
@@ -32,7 +33,7 @@ jobs:
         uses: actions/checkout@v3
       - id: set-netbox-docker-version
         name: Get Version of NetBox Docker
-        run: echo "::set-output name=version::$(cat VERSION)"
+        run: echo "version=$(cat VERSION)" >>"$GITHUB_OUTPUT"
         shell: bash
       - id: qemu-setup
        name: Set up QEMU

View File

@@ -40,8 +40,8 @@ services:
     ports:
       - 8000:8080
 EOF
-docker-compose pull
-docker-compose up
+docker compose pull
+docker compose up
 ```

 The whole application will be available after a few minutes.
@@ -123,7 +123,7 @@ This project relies only on _Docker_ and _docker-compose_ meeting these requirements:
 * The _containerd version_ must be at least `1.5.6`.
 * The _docker-compose version_ must be at least `1.28.0`.

-To check the version installed on your system run `docker --version` and `docker-compose --version`.
+To check the version installed on your system run `docker --version` and `docker compose version`.

 ## Updating
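The README change above reflects the Compose v2 CLI, which ships as a Docker plugin and is invoked as a `docker` subcommand rather than as the standalone `docker-compose` binary. A quick way to see which flavour is installed (a sketch; the exact output format varies by version):

```bash
# Compose v2 is a docker CLI plugin, invoked as a subcommand:
docker compose version        # e.g. "Docker Compose version v2.x.x"

# The legacy standalone v1 binary uses a hyphen and a different flag:
docker-compose --version      # e.g. "docker-compose version 1.29.x, ..."
```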

View File

@@ -19,3 +19,14 @@ gh_env() {
     echo "${@}" >>"${GITHUB_ENV}"
   fi
 }
+
+###
+# Prints the output to the file defined in ${GITHUB_OUTPUT}.
+# Only executes if ${GH_ACTION} is defined.
+# Example Usage: gh_env "FOO_VAR=bar_value"
+###
+gh_out() {
+  if [ -n "${GH_ACTION}" ]; then
+    echo "${@}" >>"$GITHUB_OUTPUT"
+  fi
+}
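The new `gh_out` helper mirrors the existing `gh_env` helper: instead of the deprecated `::set-output` workflow command it appends `key=value` pairs to the file named by `$GITHUB_OUTPUT`, which GitHub Actions exposes as step outputs. A minimal usage sketch:

```bash
# Inside a build script running as a GitHub Actions step:
source ./build-functions/gh-functions.sh
gh_out "skipped=true"   # appends "skipped=true" to the $GITHUB_OUTPUT file
```

A later step in the same workflow can then read the value as `${{ steps.<step-id>.outputs.skipped }}`; outside of GitHub Actions (`GH_ACTION` unset) the call is a no-op.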

View File

@@ -1,5 +1,6 @@
 #!/bin/bash
 # Builds the latest released version
+source ./build-functions/gh-functions.sh

 echo "▶️ $0 $*"

@@ -11,16 +12,20 @@ if ! command -v jq; then
   exit 1
 fi

+CURL_ARGS=(
+  --silent
+)
+
 ###
-# Checking for the presence of GITHUB_OAUTH_CLIENT_ID
-# and GITHUB_OAUTH_CLIENT_SECRET
+# Checking for the presence of GITHUB_TOKEN
 ###
-if [ -n "${GITHUB_OAUTH_CLIENT_ID}" ] && [ -n "${GITHUB_OAUTH_CLIENT_SECRET}" ]; then
+if [ -n "${GITHUB_TOKEN}" ]; then
   echo "🗝 Performing authenticated Github API calls."
-  GITHUB_OAUTH_PARAMS="client_id=${GITHUB_OAUTH_CLIENT_ID}&client_secret=${GITHUB_OAUTH_CLIENT_SECRET}"
+  CURL_ARGS+=(
+    --header "Authorization: Bearer ${GITHUB_TOKEN}"
+  )
 else
   echo "🕶 Performing unauthenticated Github API calls. This might result in lower Github rate limits!"
-  GITHUB_OAUTH_PARAMS=""
 fi

 ###
@@ -42,31 +47,27 @@ fi
 ###
 ORIGINAL_GITHUB_REPO="netbox-community/netbox"
 GITHUB_REPO="${GITHUB_REPO-$ORIGINAL_GITHUB_REPO}"
-URL_RELEASES="https://api.github.com/repos/${GITHUB_REPO}/releases?${GITHUB_OAUTH_PARAMS}"
+URL_RELEASES="https://api.github.com/repos/${GITHUB_REPO}/releases"

 # Composing the JQ commans to extract the most recent version number
 JQ_LATEST="group_by(.prerelease) | .[] | sort_by(.published_at) | reverse | .[0] | select(.prerelease==${PRERELEASE-false}) | .tag_name"

-CURL="curl -sS"
+CURL="curl"

 # Querying the Github API to fetch the most recent version number
-VERSION=$($CURL "${URL_RELEASES}" | jq -r "${JQ_LATEST}")
+VERSION=$($CURL "${CURL_ARGS[@]}" "${URL_RELEASES}" | jq -r "${JQ_LATEST}" 2>/dev/null)

 ###
 # Check if the prerelease version is actually higher than stable version
 ###
 if [ "${PRERELEASE}" == "true" ]; then
   JQ_STABLE="group_by(.prerelease) | .[] | sort_by(.published_at) | reverse | .[0] | select(.prerelease==false) | .tag_name"
-  STABLE_VERSION=$($CURL "${URL_RELEASES}" | jq -r "${JQ_STABLE}")
+  STABLE_VERSION=$($CURL "${CURL_ARGS[@]}" "${URL_RELEASES}" | jq -r "${JQ_STABLE}" 2>/dev/null)

-  # shellcheck disable=SC2003
-  MAJOR_STABLE=$(expr match "${STABLE_VERSION}" 'v\([0-9]\+\)')
-  # shellcheck disable=SC2003
-  MINOR_STABLE=$(expr match "${STABLE_VERSION}" 'v[0-9]\+\.\([0-9]\+\)')
-  # shellcheck disable=SC2003
-  MAJOR_UNSTABLE=$(expr match "${VERSION}" 'v\([0-9]\+\)')
-  # shellcheck disable=SC2003
-  MINOR_UNSTABLE=$(expr match "${VERSION}" 'v[0-9]\+\.\([0-9]\+\)')
+  MAJOR_STABLE=$(expr "${STABLE_VERSION}" : 'v\([0-9]\+\)')
+  MINOR_STABLE=$(expr "${STABLE_VERSION}" : 'v[0-9]\+\.\([0-9]\+\)')
+  MAJOR_UNSTABLE=$(expr "${VERSION}" : 'v\([0-9]\+\)')
+  MINOR_UNSTABLE=$(expr "${VERSION}" : 'v[0-9]\+\.\([0-9]\+\)')

   if {
     [ "${MAJOR_STABLE}" -eq "${MAJOR_UNSTABLE}" ] &&
@@ -75,10 +76,7 @@ if [ "${PRERELEASE}" == "true" ]; then
     echo "❎ Latest unstable version '${VERSION}' is not higher than the latest stable version '$STABLE_VERSION'."
     if [ -z "$DEBUG" ]; then
-      if [ -n "${GH_ACTION}" ]; then
-        echo "::set-output name=skipped::true"
-      fi
+      gh_out "skipped=true"
       exit 0
     else
       echo "⚠️ Would exit here with code '0', but DEBUG is enabled."
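Two mechanical points from this diff: the GNU-specific `expr match STRING REGEX` form is replaced by the POSIX `expr STRING : REGEX` form, and the OAuth client-id/secret query parameters give way to a Bearer-token header carried in the `CURL_ARGS` array. A small sketch of how this behaves, using a made-up tag value:

```bash
STABLE_VERSION="v3.4.4"   # hypothetical tag, for illustration only

# `expr STRING : REGEX` prints the part captured by \(...\):
expr "${STABLE_VERSION}" : 'v\([0-9]\+\)'            # -> 3  (major)
expr "${STABLE_VERSION}" : 'v[0-9]\+\.\([0-9]\+\)'   # -> 4  (minor)

# The authenticated releases query assembled by the script, spelled out
# (requires GITHUB_TOKEN to be set, e.g. via the workflow env shown above):
curl --silent --header "Authorization: Bearer ${GITHUB_TOKEN}" \
  "https://api.github.com/repos/netbox-community/netbox/releases"
```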

View File

@@ -170,7 +170,7 @@ if [ "${2}" != "--push-only" ] && [ -z "${SKIP_GIT}" ]; then
   REMOTE_EXISTS=$(git ls-remote --heads --tags "${URL}" "${NETBOX_BRANCH}" | wc -l)
   if [ "${REMOTE_EXISTS}" == "0" ]; then
     echo "❌ Remote branch '${NETBOX_BRANCH}' not found in '${URL}'; Nothing to do"
-    gh_echo "::set-output name=skipped::true"
+    gh_out "skipped=true"
     exit 0
   fi
   echo "🌐 Checking out '${NETBOX_BRANCH}' of NetBox from the url '${URL}' into '${NETBOX_PATH}'"
@@ -349,10 +349,10 @@
 if [ "${SHOULD_BUILD}" != "true" ]; then
   echo "Build skipped because sources didn't change"
-  echo "::set-output name=skipped::true"
+  gh_out "skipped=true"
   exit 0 # Nothing to do -> exit
 else
-  gh_echo "::set-output name=skipped::false"
+  gh_out "skipped=false"
 fi

 gh_echo "::endgroup::"

View File

@@ -58,6 +58,9 @@ _BASE_DIR = dirname(dirname(abspath(__file__)))
 #
 # Example: ALLOWED_HOSTS = ['netbox.example.com', 'netbox.internal.local']
 ALLOWED_HOSTS = environ.get('ALLOWED_HOSTS', '*').split(' ')
+# ensure that '*' or 'localhost' is always in ALLOWED_HOSTS (needed for health checks)
+if '*' not in ALLOWED_HOSTS and 'localhost' not in ALLOWED_HOSTS:
+    ALLOWED_HOSTS.append('localhost')

 # PostgreSQL database configuration. See the Django documentation for a complete list of available parameters:
 # https://docs.djangoproject.com/en/stable/ref/settings/#databases
@@ -83,6 +86,7 @@ REDIS = {
     'tasks': {
         'HOST': environ.get('REDIS_HOST', 'localhost'),
         'PORT': _environ_get_and_map('REDIS_PORT', 6379, _AS_INT),
+        'USERNAME': environ.get('REDIS_USERNAME', ''),
         'PASSWORD': _read_secret('redis_password', environ.get('REDIS_PASSWORD', '')),
         'DATABASE': _environ_get_and_map('REDIS_DATABASE', 0, _AS_INT),
         'SSL': _environ_get_and_map('REDIS_SSL', 'False', _AS_BOOL),
@@ -91,6 +95,7 @@ REDIS = {
     'caching': {
         'HOST': environ.get('REDIS_CACHE_HOST', environ.get('REDIS_HOST', 'localhost')),
         'PORT': _environ_get_and_map('REDIS_CACHE_PORT', environ.get('REDIS_PORT', '6379'), _AS_INT),
+        'USERNAME': environ.get('REDIS_CACHE_USERNAME', environ.get('REDIS_USERNAME', '')),
         'PASSWORD': _read_secret('redis_cache_password', environ.get('REDIS_CACHE_PASSWORD', environ.get('REDIS_PASSWORD', ''))),
         'DATABASE': _environ_get_and_map('REDIS_CACHE_DATABASE', '1', _AS_INT),
         'SSL': _environ_get_and_map('REDIS_CACHE_SSL', environ.get('REDIS_SSL', 'False'), _AS_BOOL),
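Operationally, these two configuration changes surface as environment variables: `ALLOWED_HOSTS` no longer needs to list `localhost` explicitly for the container health check to reach the application, and a Redis ACL user can be supplied via `REDIS_USERNAME` / `REDIS_CACHE_USERNAME`. A hypothetical env-file excerpt (file name and values are placeholders; only the variable names come from the diff above):

```bash
# e.g. in an env file passed to the netbox container:
# 'localhost' is appended automatically when '*' is not present,
# so only the externally reachable names need to be listed here.
ALLOWED_HOSTS=netbox.example.com

# New optional Redis ACL username settings (placeholder values):
REDIS_USERNAME=netbox
REDIS_CACHE_USERNAME=netbox-cache
```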

View File

@@ -61,18 +61,26 @@ LDAP_CA_CERT_FILE = environ.get('LDAP_CA_CERT_FILE', None)
 AUTH_LDAP_USER_SEARCH_BASEDN = environ.get('AUTH_LDAP_USER_SEARCH_BASEDN', '')
 AUTH_LDAP_USER_SEARCH_ATTR = environ.get('AUTH_LDAP_USER_SEARCH_ATTR', 'sAMAccountName')
+AUTH_LDAP_USER_SEARCH_FILTER: str = environ.get(
+    'AUTH_LDAP_USER_SEARCH_FILTER', f'({AUTH_LDAP_USER_SEARCH_ATTR}=%(user)s)'
+)
+
 AUTH_LDAP_USER_SEARCH = LDAPSearch(
-    AUTH_LDAP_USER_SEARCH_BASEDN,
-    ldap.SCOPE_SUBTREE,
-    "(" + AUTH_LDAP_USER_SEARCH_ATTR + "=%(user)s)"
+    AUTH_LDAP_USER_SEARCH_BASEDN, ldap.SCOPE_SUBTREE, AUTH_LDAP_USER_SEARCH_FILTER
 )

 # This search ought to return all groups to which the user belongs. django_auth_ldap uses this to determine group
 # heirarchy.

 AUTH_LDAP_GROUP_SEARCH_BASEDN = environ.get('AUTH_LDAP_GROUP_SEARCH_BASEDN', '')
 AUTH_LDAP_GROUP_SEARCH_CLASS = environ.get('AUTH_LDAP_GROUP_SEARCH_CLASS', 'group')
-AUTH_LDAP_GROUP_SEARCH = LDAPSearch(AUTH_LDAP_GROUP_SEARCH_BASEDN, ldap.SCOPE_SUBTREE,
-                                    "(objectClass=" + AUTH_LDAP_GROUP_SEARCH_CLASS + ")")
+
+AUTH_LDAP_GROUP_SEARCH_FILTER: str = environ.get(
+    'AUTH_LDAP_GROUP_SEARCH_FILTER', f'(objectclass={AUTH_LDAP_GROUP_SEARCH_CLASS})'
+)
+AUTH_LDAP_GROUP_SEARCH = LDAPSearch(
+    AUTH_LDAP_GROUP_SEARCH_BASEDN, ldap.SCOPE_SUBTREE, AUTH_LDAP_GROUP_SEARCH_FILTER
+)
 AUTH_LDAP_GROUP_TYPE = _import_group_type(environ.get('AUTH_LDAP_GROUP_TYPE', 'GroupOfNamesType'))

 # Define a group required to login.
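The LDAP change keeps the old attribute-based defaults but allows the whole search filter to be overridden through two new environment variables. A sketch with made-up filter values (the variable names come from the diff above; the LDAP attributes and DNs are placeholders to adjust for your directory):

```bash
# Restrict logins to members of a specific group (example filter only):
AUTH_LDAP_USER_SEARCH_FILTER='(&(sAMAccountName=%(user)s)(memberOf=CN=netbox-users,OU=Groups,DC=example,DC=com))'

# Only consider groups whose name starts with "netbox-" (example filter only):
AUTH_LDAP_GROUP_SEARCH_FILTER='(&(objectclass=group)(cn=netbox-*))'
```

If the variables are unset, the defaults reproduce the previous behaviour: a user filter of `(<AUTH_LDAP_USER_SEARCH_ATTR>=%(user)s)` and a group filter of `(objectclass=<AUTH_LDAP_GROUP_SEARCH_CLASS>)`.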

View File

@@ -3,3 +3,11 @@ services:
   netbox:
     ports:
       - 8000:8080
+    healthcheck:
+      # Time for which the health check can fail after the container is started.
+      # This depends mostly on the performance of your database. On the first start,
+      # when all tables need to be created the start_period should be higher than on
+      # subsequent starts. For the first start after major version upgrades of NetBox
+      # the start_period might also need to be set higher.
+      # Default value in our docker-compose.yml is 60s
+      start_period: 90s
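This override only raises `start_period`; the health check command itself stays as defined in the base `docker-compose.yml`. To see whether the container has left the "starting" state, something like the following can be used (the inspected container name depends on your compose project name, so treat it as a placeholder):

```bash
# Shows the service state, including "(healthy)" or "(health: starting)":
docker compose ps netbox

# Or query the health status directly (container name is a placeholder):
docker inspect --format '{{.State.Health.Status}}' netbox-docker-netbox-1
```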

View File

@@ -1,7 +1,7 @@
 version: '3.4'
 services:
   netbox: &netbox
-    image: netboxcommunity/netbox:${VERSION-v3.4-2.4.0}
+    image: docker.io/netboxcommunity/netbox:${VERSION-v3.4-2.4.0}
     depends_on:
       - postgres
       - redis
@@ -47,14 +47,14 @@
   # postgres
   postgres:
-    image: postgres:15-alpine
+    image: docker.io/postgres:15-alpine
     env_file: env/postgres.env
     volumes:
       - netbox-postgres-data:/var/lib/postgresql/data

   # redis
   redis:
-    image: redis:7-alpine
+    image: docker.io/redis:7-alpine
     command:
       - sh
       - -c # this is to evaluate the $REDIS_PASSWORD from the env
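As the commit message for #910 explains, the fully qualified image names address podman's short-name handling: with `short-name-mode = "enforcing"` in `registries.conf`, unqualified image names are rejected in non-interactive use, while fully qualified references always resolve without ambiguity. A quick illustration, assuming a podman installation configured that way:

```bash
# Ambiguous short name: may fail or prompt for a registry under enforcing mode:
podman pull postgres:15-alpine

# Fully qualified reference: no short-name resolution needed:
podman pull docker.io/library/postgres:15-alpine
```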

View File

@@ -1,6 +1,6 @@
 django-auth-ldap==4.1.0
-django-storages[azure,boto3,dropbox,google,libcloud,sftp]==1.13.1
+django-storages[azure,boto3,dropbox,google,libcloud,sftp]==1.13.2
 napalm==4.0.0
 psycopg2==2.9.5
-python3-saml==1.14.0
+python3-saml==1.15.0
 social-auth-core[all]==4.3.0