From 014acb0d2335744ab97ff73df39ac0b218e54ec9 Mon Sep 17 00:00:00 2001 From: geos_one Date: Thu, 19 Aug 2010 17:37:01 +0000 Subject: [PATCH] add multilib eclasses git-svn-id: https://svn.disconnected-by-peer.at/svn/linamh/trunk/vmware@2303 6952d904-891a-0410-993b-d76249ca496b --- eclass/distutils.eclass | 494 +++++ eclass/gst-plugins-base.eclass | 137 ++ eclass/java-pkg-2.eclass | 182 ++ eclass/java-pkg-opt-2.eclass | 73 + eclass/java-utils-2.eclass | 3489 ++++++++++++++++++++++++++++++++ eclass/linux-mod.eclass | 813 ++++++++ eclass/mozconfig-2.eclass | 67 + eclass/mozconfig-3.eclass | 64 + eclass/multilib-native.eclass | 717 +++++++ eclass/python.eclass | 2609 ++++++++++++++++++++++++ eclass/qt4-build.eclass | 802 ++++++++ eclass/toolchain.eclass | 2494 +++++++++++++++++++++++ eclass/x-modular.eclass | 619 ++++++ eclass/xorg-2.eclass | 413 ++++ 14 files changed, 12973 insertions(+) create mode 100644 eclass/distutils.eclass create mode 100644 eclass/gst-plugins-base.eclass create mode 100644 eclass/java-pkg-2.eclass create mode 100644 eclass/java-pkg-opt-2.eclass create mode 100644 eclass/java-utils-2.eclass create mode 100644 eclass/linux-mod.eclass create mode 100644 eclass/mozconfig-2.eclass create mode 100644 eclass/mozconfig-3.eclass create mode 100644 eclass/multilib-native.eclass create mode 100644 eclass/python.eclass create mode 100644 eclass/qt4-build.eclass create mode 100644 eclass/toolchain.eclass create mode 100644 eclass/x-modular.eclass create mode 100644 eclass/xorg-2.eclass diff --git a/eclass/distutils.eclass b/eclass/distutils.eclass new file mode 100644 index 0000000..f5f1250 --- /dev/null +++ b/eclass/distutils.eclass @@ -0,0 +1,494 @@ +# Copyright 1999-2010 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 +# $Header: /var/cvsroot/gentoo-x86/eclass/distutils.eclass,v 1.76 2010/07/17 23:03:29 arfrever Exp $ + +# @ECLASS: distutils.eclass +# @MAINTAINER: +# Gentoo Python Project +# +# Original author: Jon 
Nelson +# @BLURB: Eclass for packages with build systems using Distutils +# @DESCRIPTION: +# The distutils eclass defines phase functions for packages with build systems using Distutils. + +inherit multilib python + +case "${EAPI:-0}" in + 0|1) + EXPORT_FUNCTIONS src_unpack src_compile src_install pkg_postinst pkg_postrm + ;; + *) + EXPORT_FUNCTIONS src_prepare src_compile src_install pkg_postinst pkg_postrm + ;; +esac + +if [[ -z "$(declare -p PYTHON_DEPEND 2> /dev/null)" ]]; then + if [[ $(number_abis) -gt 1 ]] ; then + DEPEND="dev-lang/python[lib32?]" + else + DEPEND="dev-lang/python" + fi + RDEPEND="${DEPEND}" +fi + +# 'python' variable is deprecated. Use PYTHON() instead. +if has "${EAPI:-0}" 0 1 2 && [[ -z "${SUPPORT_PYTHON_ABIS}" ]]; then + python="python" +else + python="die" +fi + +# @ECLASS-VARIABLE: DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES +# @DESCRIPTION: +# Set this to use separate source directories for each enabled version of Python. + +# @ECLASS-VARIABLE: DISTUTILS_SETUP_FILES +# @DESCRIPTION: +# Paths to setup files. + +# @ECLASS-VARIABLE: DISTUTILS_GLOBAL_OPTIONS +# @DESCRIPTION: +# Global options passed to setup files. + +# @ECLASS-VARIABLE: DISTUTILS_SRC_TEST +# @DESCRIPTION: +# Type of test command used by distutils_src_test(). +# IUSE and DEPEND are automatically adjusted, unless DISTUTILS_DISABLE_TEST_DEPENDENCY is set. +# Valid values: +# setup.py +# nosetests +# py.test +# trial [arguments] + +# @ECLASS-VARIABLE: DISTUTILS_DISABLE_TEST_DEPENDENCY +# @DESCRIPTION: +# Disable modification of IUSE and DEPEND caused by setting of DISTUTILS_SRC_TEST. + +if [[ -n "${DISTUTILS_SRC_TEST}" && ! "${DISTUTILS_SRC_TEST}" =~ ^(setup\.py|nosetests|py\.test|trial(\ .*)?)$ ]]; then + die "'DISTUTILS_SRC_TEST' variable has unsupported value '${DISTUTILS_SRC_TEST}'" +fi + +if [[ -z "${DISTUTILS_DISABLE_TEST_DEPENDENCY}" ]]; then + if [[ "${DISTUTILS_SRC_TEST}" == "nosetests" ]]; then + IUSE="test" + DEPEND+="${DEPEND:+ }test? 
( dev-python/nose )" + elif [[ "${DISTUTILS_SRC_TEST}" == "py.test" ]]; then + IUSE="test" + DEPEND+="${DEPEND:+ }test? ( dev-python/py )" + # trial requires an argument, which is usually equal to "${PN}". + elif [[ "${DISTUTILS_SRC_TEST}" =~ ^trial(\ .*)?$ ]]; then + IUSE="test" + DEPEND+="${DEPEND:+ }test? ( dev-python/twisted )" + fi +fi + +if [[ -n "${DISTUTILS_SRC_TEST}" ]]; then + EXPORT_FUNCTIONS src_test +fi + +# @ECLASS-VARIABLE: DISTUTILS_DISABLE_VERSIONING_OF_PYTHON_SCRIPTS +# @DESCRIPTION: +# Set this to disable renaming of Python scripts containing versioned shebangs +# and generation of wrapper scripts. + +# @ECLASS-VARIABLE: DISTUTILS_NONVERSIONED_PYTHON_SCRIPTS +# @DESCRIPTION: +# List of paths to Python scripts, relative to ${ED}, which are excluded from +# renaming and generation of wrapper scripts. + +# @ECLASS-VARIABLE: DOCS +# @DESCRIPTION: +# Additional documentation files installed by distutils_src_install(). + +_distutils_get_build_dir() { + if [[ -n "${SUPPORT_PYTHON_ABIS}" && -z "${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES}" ]]; then + echo "build-${PYTHON_ABI}" + else + echo "build" + fi +} + +_distutils_get_PYTHONPATH() { + if [[ -n "${SUPPORT_PYTHON_ABIS}" && -z "${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES}" ]]; then + ls -d build-${PYTHON_ABI}/lib* 2> /dev/null + else + ls -d build/lib* 2> /dev/null + fi +} + +_distutils_hook() { + if [[ "$#" -ne 1 ]]; then + die "${FUNCNAME}() requires 1 argument" + fi + if [[ "$(type -t "distutils_src_${EBUILD_PHASE}_$1_hook")" == "function" ]]; then + "distutils_src_${EBUILD_PHASE}_$1_hook" + fi +} + +# @FUNCTION: distutils_src_unpack +# @DESCRIPTION: +# The distutils src_unpack function. This function is exported. +distutils_src_unpack() { + if ! 
has "${EAPI:-0}" 0 1; then + die "${FUNCNAME}() cannot be used in this EAPI" + fi + + if [[ "${EBUILD_PHASE}" != "unpack" ]]; then + die "${FUNCNAME}() can be used only in src_unpack() phase" + fi + + unpack ${A} + cd "${S}" + + distutils_src_prepare +} + +# @FUNCTION: distutils_src_prepare +# @DESCRIPTION: +# The distutils src_prepare function. This function is exported. +distutils_src_prepare() { + if ! has "${EAPI:-0}" 0 1 && [[ "${EBUILD_PHASE}" != "prepare" ]]; then + die "${FUNCNAME}() can be used only in src_prepare() phase" + fi + + # Delete ez_setup files to prevent packages from installing Setuptools on their own. + local ez_setup_existence="0" + [[ -d ez_setup || -f ez_setup.py ]] && ez_setup_existence="1" + rm -fr ez_setup* + if [[ "${ez_setup_existence}" == "1" ]]; then + echo "def use_setuptools(*args, **kwargs): pass" > ez_setup.py + fi + + # Delete distribute_setup files to prevent packages from installing Distribute on their own. + local distribute_setup_existence="0" + [[ -d distribute_setup || -f distribute_setup.py ]] && distribute_setup_existence="1" + rm -fr distribute_setup* + if [[ "${distribute_setup_existence}" == "1" ]]; then + echo "def use_setuptools(*args, **kwargs): pass" > distribute_setup.py + fi + + if [[ -n "${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES}" ]]; then + python_copy_sources + fi +} + +# @FUNCTION: distutils_src_compile +# @DESCRIPTION: +# The distutils src_compile function. This function is exported. +# In ebuilds of packages supporting installation for multiple versions of Python, this function +# calls distutils_src_compile_pre_hook() and distutils_src_compile_post_hook(), if they are defined. 
+distutils_src_compile() { + if [[ "${EBUILD_PHASE}" != "compile" ]]; then + die "${FUNCNAME}() can be used only in src_compile() phase" + fi + + _python_set_color_variables + + if [[ -n "${SUPPORT_PYTHON_ABIS}" ]]; then + distutils_building() { + _distutils_hook pre + + local setup_file + for setup_file in "${DISTUTILS_SETUP_FILES[@]-setup.py}"; do + echo ${_BOLD}"$(PYTHON)" "${setup_file}" "${DISTUTILS_GLOBAL_OPTIONS[@]}" build -b "$(_distutils_get_build_dir)" "$@"${_NORMAL} + "$(PYTHON)" "${setup_file}" "${DISTUTILS_GLOBAL_OPTIONS[@]}" build -b "$(_distutils_get_build_dir)" "$@" || return "$?" + done + + _distutils_hook post + } + python_execute_function ${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES:+-s} distutils_building "$@" + else + local setup_file + for setup_file in "${DISTUTILS_SETUP_FILES[@]-setup.py}"; do + echo ${_BOLD}"$(PYTHON)" "${setup_file}" "${DISTUTILS_GLOBAL_OPTIONS[@]}" build "$@"${_NORMAL} + "$(PYTHON)" "${setup_file}" "${DISTUTILS_GLOBAL_OPTIONS[@]}" build "$@" || die "Building failed" + done + fi +} + +_distutils_src_test_hook() { + if [[ "$#" -ne 1 ]]; then + die "${FUNCNAME}() requires 1 arguments" + fi + + if [[ -z "${SUPPORT_PYTHON_ABIS}" ]]; then + return + fi + + if [[ "$(type -t "distutils_src_test_pre_hook")" == "function" ]]; then + eval "python_execute_$1_pre_hook() { + distutils_src_test_pre_hook + }" + fi + + if [[ "$(type -t "distutils_src_test_post_hook")" == "function" ]]; then + eval "python_execute_$1_post_hook() { + distutils_src_test_post_hook + }" + fi +} + +# @FUNCTION: distutils_src_test +# @DESCRIPTION: +# The distutils src_test function. This function is exported, when DISTUTILS_SRC_TEST variable is set. +# In ebuilds of packages supporting installation for multiple versions of Python, this function +# calls distutils_src_test_pre_hook() and distutils_src_test_post_hook(), if they are defined. 
+distutils_src_test() { + if [[ "${EBUILD_PHASE}" != "test" ]]; then + die "${FUNCNAME}() can be used only in src_test() phase" + fi + + _python_set_color_variables + + if [[ "${DISTUTILS_SRC_TEST}" == "setup.py" ]]; then + if [[ -n "${SUPPORT_PYTHON_ABIS}" ]]; then + distutils_testing() { + _distutils_hook pre + + local setup_file + for setup_file in "${DISTUTILS_SETUP_FILES[@]-setup.py}"; do + echo ${_BOLD}PYTHONPATH="$(_distutils_get_PYTHONPATH)" "$(PYTHON)" "${setup_file}" "${DISTUTILS_GLOBAL_OPTIONS[@]}" $([[ -z "${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES}" ]] && echo build -b "$(_distutils_get_build_dir)") test "$@"${_NORMAL} + PYTHONPATH="$(_distutils_get_PYTHONPATH)" "$(PYTHON)" "${setup_file}" "${DISTUTILS_GLOBAL_OPTIONS[@]}" $([[ -z "${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES}" ]] && echo build -b "$(_distutils_get_build_dir)") test "$@" || return "$?" + done + + _distutils_hook post + } + python_execute_function ${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES:+-s} distutils_testing "$@" + else + local setup_file + for setup_file in "${DISTUTILS_SETUP_FILES[@]-setup.py}"; do + echo ${_BOLD}PYTHONPATH="$(_distutils_get_PYTHONPATH)" "$(PYTHON)" "${setup_file}" "${DISTUTILS_GLOBAL_OPTIONS[@]}" test "$@"${_NORMAL} + PYTHONPATH="$(_distutils_get_PYTHONPATH)" "$(PYTHON)" "${setup_file}" "${DISTUTILS_GLOBAL_OPTIONS[@]}" test "$@" || die "Testing failed" + done + fi + elif [[ "${DISTUTILS_SRC_TEST}" == "nosetests" ]]; then + _distutils_src_test_hook nosetests + + python_execute_nosetests -P '$(_distutils_get_PYTHONPATH)' ${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES:+-s} -- "$@" + elif [[ "${DISTUTILS_SRC_TEST}" == "py.test" ]]; then + _distutils_src_test_hook py.test + + python_execute_py.test -P '$(_distutils_get_PYTHONPATH)' ${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES:+-s} -- "$@" + # trial requires an argument, which is usually equal to "${PN}". 
+ elif [[ "${DISTUTILS_SRC_TEST}" =~ ^trial(\ .*)?$ ]]; then + local trial_arguments + if [[ "${DISTUTILS_SRC_TEST}" == "trial "* ]]; then + trial_arguments="${DISTUTILS_SRC_TEST#trial }" + else + trial_arguments="${PN}" + fi + + _distutils_src_test_hook trial + + python_execute_trial -P '$(_distutils_get_PYTHONPATH)' ${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES:+-s} -- ${trial_arguments} "$@" + else + die "'DISTUTILS_SRC_TEST' variable has unsupported value '${DISTUTILS_SRC_TEST}'" + fi +} + +# @FUNCTION: distutils_src_install +# @DESCRIPTION: +# The distutils src_install function. This function is exported. +# In ebuilds of packages supporting installation for multiple versions of Python, this function +# calls distutils_src_install_pre_hook() and distutils_src_install_post_hook(), if they are defined. +# It also installs some standard documentation files (AUTHORS, Change*, CHANGELOG, CONTRIBUTORS, +# KNOWN_BUGS, MAINTAINERS, MANIFEST*, NEWS, PKG-INFO, README*, TODO). +distutils_src_install() { + if [[ "${EBUILD_PHASE}" != "install" ]]; then + die "${FUNCNAME}() can be used only in src_install() phase" + fi + + if is_final_abi || (! 
has_multilib_profile); then + if [ -n "${PYTHON_SLOT_VERSION}" ] ; then + python=python${PYTHON_SLOT_VERSION} + else + python=python + fi + else + [[ -z $(get_abi_var SETARCH_ARCH ${ABI}) ]] && die "SETARCH_ARCH_${ABI} is missing in your portage profile take a look at http://wiki.github.com/sjnewbury/multilib-overlay to get further information" + if [ -n "${PYTHON_SLOT_VERSION}" ] ; then + python="setarch $(get_abi_var SETARCH_ARCH ${ABI}) python${PYTHON_SLOT_VERSION}-${ABI}" + elif [[ -n "${PYTHON}" ]]; then + python="setarch $(get_abi_var SETARCH_ARCH ${ABI}) ${PYTHON}" + else + python="setarch $(get_abi_var SETARCH_ARCH ${ABI}) python" + fi + fi + einfo Using ${python} + + _python_initialize_prefix_variables + _python_set_color_variables + + if [[ -n "${SUPPORT_PYTHON_ABIS}" ]]; then + if [[ -z "${DISTUTILS_DISABLE_VERSIONING_OF_PYTHON_SCRIPTS}" && "${BASH_VERSINFO[0]}" -ge 4 ]]; then + declare -A wrapper_scripts=() + + rename_scripts_with_versioned_shebangs() { + if [[ -d "${ED}usr/bin" ]]; then + cd "${ED}usr/bin" + + local nonversioned_file file + for file in *; do + if [[ -f "${file}" && ! 
"${file}" =~ [[:digit:]]+\.[[:digit:]](-jython)?+$ && "$(head -n1 "${file}")" =~ ^'#!'.*(python|jython-)[[:digit:]]+\.[[:digit:]]+ ]]; then + for nonversioned_file in "${DISTUTILS_NONVERSIONED_PYTHON_SCRIPTS[@]}"; do + [[ "${nonversioned_file}" == "/usr/bin/${file}" ]] && continue 2 + done + mv "${file}" "${file}-${PYTHON_ABI}" || die "Renaming of '${file}' failed" + wrapper_scripts+=(["${ED}usr/bin/${file}"]=) + fi + done + fi + } + fi + + distutils_installation() { + _distutils_hook pre + + local setup_file + for setup_file in "${DISTUTILS_SETUP_FILES[@]-setup.py}"; do + echo ${_BOLD}"$(PYTHON)" "${setup_file}" "${DISTUTILS_GLOBAL_OPTIONS[@]}" $([[ -z "${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES}" ]] && echo build -b "$(_distutils_get_build_dir)") install --root="${D}" --no-compile "$@"${_NORMAL} + "$(PYTHON)" "${setup_file}" "${DISTUTILS_GLOBAL_OPTIONS[@]}" $([[ -z "${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES}" ]] && echo build -b "$(_distutils_get_build_dir)") install --root="${D}" --no-compile "$@" || return "$?" + done + + if [[ -z "${DISTUTILS_DISABLE_VERSIONING_OF_PYTHON_SCRIPTS}" && "${BASH_VERSINFO[0]}" -ge 4 ]]; then + rename_scripts_with_versioned_shebangs + fi + + _distutils_hook post + } + python_execute_function ${DISTUTILS_USE_SEPARATE_SOURCE_DIRECTORIES:+-s} distutils_installation "$@" + + if [[ -z "${DISTUTILS_DISABLE_VERSIONING_OF_PYTHON_SCRIPTS}" && "${#wrapper_scripts[@]}" -ne 0 && "${BASH_VERSINFO[0]}" -ge 4 ]]; then + python_generate_wrapper_scripts "${!wrapper_scripts[@]}" + fi + unset wrapper_scripts + else + # Mark the package to be rebuilt after a Python upgrade. 
+ python_need_rebuild + + local setup_file + for setup_file in "${DISTUTILS_SETUP_FILES[@]-setup.py}"; do + echo ${_BOLD}"$(PYTHON)" "${setup_file}" "${DISTUTILS_GLOBAL_OPTIONS[@]}" install --root="${D}" --no-compile "$@"${_NORMAL} + "$(PYTHON)" "${setup_file}" "${DISTUTILS_GLOBAL_OPTIONS[@]}" install --root="${D}" --no-compile "$@" || die "Installation failed" + done + fi + + if [[ -e "${ED}usr/local" ]]; then + die "Illegal installation into /usr/local" + fi + + local default_docs + default_docs="AUTHORS Change* CHANGELOG CONTRIBUTORS KNOWN_BUGS MAINTAINERS MANIFEST* NEWS PKG-INFO README* TODO" + + local doc + for doc in ${default_docs}; do + [[ -s "${doc}" ]] && dodoc "${doc}" + done + + if [[ -n "${DOCS}" ]]; then + dodoc ${DOCS} || die "dodoc failed" + fi +} + +# @FUNCTION: distutils_pkg_postinst +# @DESCRIPTION: +# The distutils pkg_postinst function. This function is exported. +# When PYTHON_MODNAME variable is set, then this function calls python_mod_optimize() with modules +# specified in PYTHON_MODNAME variable. Otherwise it calls python_mod_optimize() with module, whose +# name is equal to name of current package, if this module exists. +distutils_pkg_postinst() { + if [[ "${EBUILD_PHASE}" != "postinst" ]]; then + die "${FUNCNAME}() can be used only in pkg_postinst() phase" + fi + + _python_initialize_prefix_variables + + local pylibdir pymod + if [[ -z "$(declare -p PYTHON_MODNAME 2> /dev/null)" ]]; then + for pylibdir in "${EROOT}"usr/$(get_libdir)/python* "${EROOT}"/usr/share/jython-*/Lib; do + if [[ -d "${pylibdir}/site-packages/${PN}" ]]; then + PYTHON_MODNAME="${PN}" + fi + done + fi + + if has "${EAPI:-0}" 0 1 2; then + if is_final_abi || (! 
has_multilib_profile); then + if [ -n "${PYTHON_SLOT_VERSION}" ] ; then + python=python${PYTHON_SLOT_VERSION} + else + python=python + fi + else + [[ -z $(get_abi_var SETARCH_ARCH ${ABI}) ]] && die "SETARCH_ARCH_${ABI} is missing in your portage profile take a look at http://wiki.github.com/sjnewbury/multilib-overlay to get further information" + if [ -n "${PYTHON_SLOT_VERSION}" ] ; then + python="setarch $(get_abi_var SETARCH_ARCH ${ABI}) python${PYTHON_SLOT_VERSION}-${ABI}" + elif [[ -n "${PYTHON}" ]]; then + python="setarch $(get_abi_var SETARCH_ARCH ${ABI}) ${PYTHON}" + else + python="setarch $(get_abi_var SETARCH_ARCH ${ABI}) python" + fi + fi + else + python="die" + fi + einfo Using ${python} + if [[ -n "${PYTHON_MODNAME}" ]]; then + if ! has "${EAPI:-0}" 0 1 2 || [[ -n "${SUPPORT_PYTHON_ABIS}" ]]; then + python_mod_optimize ${PYTHON_MODNAME} + else + for pymod in ${PYTHON_MODNAME}; do + python_mod_optimize "$(python_get_sitedir)/${pymod}" + done + fi + fi +} + +# @FUNCTION: distutils_pkg_postrm +# @DESCRIPTION: +# The distutils pkg_postrm function. This function is exported. +# When PYTHON_MODNAME variable is set, then this function calls python_mod_cleanup() with modules +# specified in PYTHON_MODNAME variable. Otherwise it calls python_mod_cleanup() with module, whose +# name is equal to name of current package, if this module exists. +distutils_pkg_postrm() { + if [[ "${EBUILD_PHASE}" != "postrm" ]]; then + die "${FUNCNAME}() can be used only in pkg_postrm() phase" + fi + + _python_initialize_prefix_variables + + local pylibdir pymod + if [[ -z "$(declare -p PYTHON_MODNAME 2> /dev/null)" ]]; then + for pylibdir in "${EROOT}"usr/$(get_libdir)/python* "${EROOT}"/usr/share/jython-*/Lib; do + if [[ -d "${pylibdir}/site-packages/${PN}" ]]; then + PYTHON_MODNAME="${PN}" + fi + done + fi + + if [[ -n "${PYTHON_MODNAME}" ]]; then + if ! 
has "${EAPI:-0}" 0 1 2 || [[ -n "${SUPPORT_PYTHON_ABIS}" ]]; then + python_mod_cleanup ${PYTHON_MODNAME} + else + for pymod in ${PYTHON_MODNAME}; do + for pylibdir in "${EROOT}"usr/$(get_libdir)/python*; do + if [[ -d "${pylibdir}/site-packages/${pymod}" ]]; then + python_mod_cleanup "${pylibdir#${EROOT%/}}/site-packages/${pymod}" + fi + done + done + fi + fi +} + +# Scheduled for deletion on 2011-01-01. +distutils_python_version() { + eerror "Use PYTHON() instead of python variable. Use python_get_*() instead of PYVER* variables." + die "${FUNCNAME}() is banned" +} + +# Scheduled for deletion on 2011-01-01. +distutils_python_tkinter() { + eerror "Use PYTHON_USE_WITH=\"xml\" and python_pkg_setup() instead of ${FUNCNAME}()." + die "${FUNCNAME}() is banned" +} diff --git a/eclass/gst-plugins-base.eclass b/eclass/gst-plugins-base.eclass new file mode 100644 index 0000000..812510e --- /dev/null +++ b/eclass/gst-plugins-base.eclass @@ -0,0 +1,137 @@ +# Copyright 1999-2004 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 +# $Header: /var/cvsroot/gentoo-x86/eclass/gst-plugins-base.eclass,v 1.16 2010/03/19 01:20:40 leio Exp $ + +# Author : foser + +# gst-plugins eclass +# +# eclass to make external gst-plugins emergable on a per-plugin basis +# to solve the problem with gst-plugins generating far too much unneeded deps +# +# 3rd party applications using gstreamer now should depend on a set of plugins as +# defined in the source, in case of spider usage obtain recommended plugins to use from +# Gentoo developers responsible for gstreamer , the application developer +# or the gstreamer team. 
+ +inherit eutils gst-plugins10 + + +### +# variable declarations +### + +MY_PN=gst-plugins-base +MY_P=${MY_PN}-${PV} +# All relevant configure options for gst-plugins +# need a better way to extract these +# gst-plugins-base 0.9 +my_gst_plugins_base="x xvideo xshm gst_v4l alsa cdparanoia gnome_vfs +gio libvisual ogg oggtest theora ivorbis vorbis vorbistest examples +freetypetest pango" + +#SRC_URI="mirror://gnome/sources/gst-plugins/${PV_MAJ_MIN}/${MY_P}.tar.bz2" +SRC_URI="http://gstreamer.freedesktop.org/src/gst-plugins-base/${MY_P}.tar.bz2" + +S=${WORKDIR}/${MY_P} + +# added to remove circular deps +# 6/2/2006 - zaheerm +if [ "${PN}" != "${MY_PN}" ]; then +RDEPEND=">=media-libs/gst-plugins-base-${PV}[lib32?]" +DEPEND="${RDEPEND} + ~media-libs/gst-plugins-base-${PV}[lib32?] + >=sys-apps/sed-4 + dev-util/pkgconfig[lib32?]" +RESTRICT=test +fi + +### +# public functions +### + +gst-plugins-base_src_configure() { + + # disable any external plugin besides the plugin we want + local plugin gst_conf + + einfo "Configuring to build ${GST_PLUGINS_BUILD} plugin(s) ..." 
+ + for plugin in ${GST_PLUGINS_BUILD}; do + my_gst_plugins_base=${my_gst_plugins_base/${plugin}/} + done + for plugin in ${my_gst_plugins_base}; do + gst_conf="${gst_conf} --disable-${plugin} " + done + for plugin in ${GST_PLUGINS_BUILD}; do + gst_conf="${gst_conf} --enable-${plugin} " + done + + cd ${S} + econf ${@} --with-package-name="Gentoo GStreamer Ebuild" --with-package-origin="http://www.gentoo.org" ${gst_conf} || die "./configure failure" + +} + +### +# public inheritable functions +### + +gst-plugins-base_src_unpack() { + +# local makefiles + + unpack ${A} + + # Link with the syswide installed gst-libs if needed + gst-plugins10_find_plugin_dir + sed -e "s:\$(top_builddir)/gst-libs/gst/interfaces/libgstinterfaces:${ROOT}/usr/$(get_libdir)/libgstinterfaces:" \ + -e "s:\${top_builddir}/gst-libs/gst/interfaces/libgstinterfaces:${ROOT}/usr/$(get_libdir)/libgstinterfaces:" \ + -e "s:\$(top_builddir)/gst-libs/gst/audio/libgstaudio:${ROOT}/usr/$(get_libdir)/libgstaudio:" \ + -e "s:\${top_builddir}/gst-libs/gst/audio/libgstaudio:${ROOT}/usr/$(get_libdir)/libgstaudio:" \ + -e "s:\$(top_builddir)/gst-libs/gst/cdda/libgstcdda:${ROOT}/usr/$(get_libdir)/libgstcdda:" \ + -e "s:\${top_builddir}/gst-libs/gst/cdda/libgstcdda:${ROOT}/usr/$(get_libdir)/libgstcdda:" \ + -e "s:\$(top_builddir)/gst-libs/gst/riff/libgstriff:${ROOT}/usr/$(get_libdir)/libgstriff:" \ + -e "s:\${top_builddir}/gst-libs/gst/riff/libgstriff:${ROOT}/usr/$(get_libdir)/libgstriff:" \ + -e "s:\$(top_builddir)/gst-libs/gst/tag/libgsttag:${ROOT}/usr/$(get_libdir)/libgsttag:" \ + -e "s:\${top_builddir}/gst-libs/gst/tag/libgsttag:${ROOT}/usr/$(get_libdir)/libgsttag:" \ + -e "s:\$(top_builddir)/gst-libs/gst/video/libgstvideo:${ROOT}/usr/$(get_libdir)/libgstvideo:" \ + -e "s:\${top_builddir}/gst-libs/gst/video/libgstvideo:${ROOT}/usr/$(get_libdir)/libgstvideo:" \ + -e "s:\$(top_builddir)/gst-libs/gst/netbuffer/libgstnetbuffer:${ROOT}/usr/$(get_libdir)/libgstnetbuffer:" \ + -e 
"s:\${top_builddir}/gst-libs/gst/netbuffer/libgstnetbuffer:${ROOT}/usr/$(get_libdir)/libgstnetbuffer:" \ + -e "s:\$(top_builddir)/gst-libs/gst/rtp/libgstrtp:${ROOT}/usr/$(get_libdir)/libgstrtp:" \ + -e "s:\${top_builddir}/gst-libs/gst/rtp/libgstrtp:${ROOT}/usr/$(get_libdir)/libgstrtp:" \ + -i Makefile.in +# cd ${S} + + # Remove generation of any other Makefiles except the plugin's Makefile +# if [ -d "${S}/sys/${GST_PLUGINS_BUILD_DIR}" ]; then +# makefiles="Makefile sys/Makefile sys/${GST_PLUGINS_BUILD_DIR}/Makefile" +# elif [ -d "${S}/ext/${GST_PLUGINS_BUILD_DIR}" ]; then +# makefiles="Makefile ext/Makefile ext/${GST_PLUGINS_BUILD_DIR}/Makefile" +# fi +# sed -e "s:ac_config_files=.*:ac_config_files='${makefiles}':" \ +# -i ${S}/configure + +} + +gst-plugins-base_src_compile() { + + if [[ ${EAPI:-0} -lt 2 ]]; then + gst-plugins-base_src_configure ${@} + fi + + gst-plugins10_find_plugin_dir + emake || die "compile failure" + +} + +gst-plugins-base_src_install() { + + gst-plugins10_find_plugin_dir + einstall || die + + [[ -e README ]] && dodoc README +} + + +EXPORT_FUNCTIONS src_unpack src_compile src_install diff --git a/eclass/java-pkg-2.eclass b/eclass/java-pkg-2.eclass new file mode 100644 index 0000000..5d8aad8 --- /dev/null +++ b/eclass/java-pkg-2.eclass @@ -0,0 +1,182 @@ +# Eclass for Java packages +# +# Copyright (c) 2004-2005, Thomas Matthijs +# Copyright (c) 2004-2005, Gentoo Foundation +# +# Licensed under the GNU General Public License, v2 +# +# $Header: /var/cvsroot/gentoo-x86/eclass/java-pkg-2.eclass,v 1.35 2010/02/01 09:38:44 caster Exp $ + +inherit java-utils-2 + +# ----------------------------------------------------------------------------- +# @eclass-begin +# @eclass-summary Eclass for Java Packages +# +# This eclass should be inherited for pure Java packages, or by packages which +# need to use Java. 
+# ----------------------------------------------------------------------------- + +# ------------------------------------------------------------------------------ +# @IUSE +# +# Use JAVA_PKG_IUSE instead of IUSE for doc, source and examples so that +# the eclass can automatically add the needed dependencies for the java-pkg_do* +# functions. +# +# Build Java packages to native libraries +# ------------------------------------------------------------------------------ +IUSE="${JAVA_PKG_IUSE} gcj multislot" + +# ------------------------------------------------------------------------------ +# @depend +# +# Java packages need java-config, and a fairly new release of Portage. +# +# JAVA_PKG_E_DEPEND is defined in java-utils.eclass. +# ------------------------------------------------------------------------------ +DEPEND="${JAVA_PKG_E_DEPEND}" + +# ------------------------------------------------------------------------------ +# @rdepend +# +# Nothing special for RDEPEND... just the same as DEPEND. 
+# ------------------------------------------------------------------------------ +RDEPEND="${DEPEND}" + +# Commons packages follow the same rules so do it here +if [[ ${CATEGORY} = dev-java && ${PN} = commons-* ]]; then + HOMEPAGE="http://commons.apache.org/${PN#commons-}/" + SRC_URI="mirror://apache/${PN/-///}/source/${P}-src.tar.gz" +fi + +case "${EAPI:-0}" in + 0|1) EXPORT_FUNCTIONS pkg_setup src_compile pkg_preinst ;; + *) EXPORT_FUNCTIONS pkg_setup src_prepare src_compile pkg_preinst ;; +esac + +# ------------------------------------------------------------------------------ +# @eclass-pkg_setup +# +# pkg_setup initializes the Java environment +# ------------------------------------------------------------------------------ +java-pkg-2_pkg_setup() { + java-pkg_init + java-pkg_ensure-test +} + +# ------------------------------------------------------------------------------ +# @eclass-src_prepare +# +# wrapper for java-utils-2_src_prepare +# ------------------------------------------------------------------------------ +java-pkg-2_src_prepare() { + java-utils-2_src_prepare +} + +# ------------------------------------------------------------------------------ +# @eclass-src_compile +# +# Default src_compile for java packages +# variables: +# EANT_BUILD_XML - controls the location of the build.xml (default: ./build.xml) +# EANT_FILTER_COMPILER - Calls java-pkg_filter-compiler with the value +# EANT_BUILD_TARGET - the ant target/targets to execute (default: jar) +# EANT_DOC_TARGET - the target to build extra docs under the doc use flag +# (default: javadoc; declare empty to disable completely) +# EANT_GENTOO_CLASSPATH - @see eant documention in java-utils-2.eclass +# EANT_EXTRA_ARGS - extra arguments to pass to eant +# EANT_ANT_TASKS - modifies the ANT_TASKS variable in the eant environment +# param: Parameters are passed to ant verbatim +# ------------------------------------------------------------------------------ +java-pkg-2_src_compile() { + if [[ -e 
"${EANT_BUILD_XML:=build.xml}" ]]; then + [[ "${EANT_FILTER_COMPILER}" ]] && \ + java-pkg_filter-compiler ${EANT_FILTER_COMPILER} + local antflags="${EANT_BUILD_TARGET:=jar}" + if hasq doc ${IUSE} && [[ -n "${EANT_DOC_TARGET=javadoc}" ]]; then + antflags="${antflags} $(use_doc ${EANT_DOC_TARGET})" + fi + local tasks + [[ ${EANT_ANT_TASKS} ]] && tasks="${ANT_TASKS} ${EANT_ANT_TASKS}" + ANT_TASKS="${tasks:-${ANT_TASKS}}" \ + eant ${antflags} -f "${EANT_BUILD_XML}" ${EANT_EXTRA_ARGS} "${@}" + else + echo "${FUNCNAME}: ${EANT_BUILD_XML} not found so nothing to do." + fi +} + +java-pkg-2_supports-test() { + python << EOF +from xml.dom.minidom import parse +import sys +dom = parse("${1}") +for elem in dom.getElementsByTagName('target'): + if elem.getAttribute('name') == 'test': + sys.exit(0) +sys.exit(1) +EOF + return $? +} + +java-pkg-2_src_test() { + [[ -e "${EANT_BUILD_XML:=build.xml}" ]] || return + + if [[ ${EANT_TEST_TARGET} ]] || java-pkg-2_supports-test ${EANT_BUILD_XML}; then + local opts task + + if [[ ${EANT_TEST_JUNIT_INTO} ]]; then + java-pkg_jar-from --into "${EANT_TEST_JUNIT_INTO}" junit + fi + + ANT_TASKS=${EANT_TEST_ANT_TASKS:-${ANT_TASKS:-${EANT_ANT_TASKS}}} + + if [[ ${DEPEND} = *dev-java/ant-junit* ]]; then + + if [[ ${ANT_TASKS} && "${ANT_TASKS}" != none ]]; then + ANT_TASKS="${ANT_TASKS} ant-junit" + else + ANT_TASKS="ant-junit" + fi + + task=true + fi + + if [[ ${task} ]] || [[ ${DEPEND} = *dev-java/junit* ]]; then + opts="-Djunit.jar=\"$(java-pkg_getjar junit junit.jar)\"" + if [[ ${EANT_TEST_GENTOO_CLASSPATH} ]]; then + EANT_GENTOO_CLASSPATH="${EANT_TEST_GENTOO_CLASSPATH},junit" + elif [[ ${EANT_GENTOO_CLASSPATH} ]]; then + EANT_GENTOO_CLASSPATH+=',junit' + else + EANT_GENTOO_CLASSPATH=junit + fi + fi + + eant ${opts} -f "${EANT_BUILD_XML}" \ + ${EANT_EXTRA_ARGS} ${EANT_TEST_EXTRA_ARGS} ${EANT_TEST_TARGET:-test} + + else + echo "${FUNCNAME}: No test target in ${EANT_BUILD_XML}" + fi +} + +# 
------------------------------------------------------------------------------ +# @eclass-pkg_preinst +# +# wrapper for java-utils-2_pkg_preinst +# ------------------------------------------------------------------------------ +java-pkg-2_pkg_preinst() { + java-utils-2_pkg_preinst +} + +# ------------------------------------------------------------------------------ +# @eclass-pkg_postinst +# ------------------------------------------------------------------------------ +pre_pkg_postinst() { + java-pkg_reg-cachejar_ +} + +# ------------------------------------------------------------------------------ +# @eclass-end +# ------------------------------------------------------------------------------ diff --git a/eclass/java-pkg-opt-2.eclass b/eclass/java-pkg-opt-2.eclass new file mode 100644 index 0000000..760d10f --- /dev/null +++ b/eclass/java-pkg-opt-2.eclass @@ -0,0 +1,73 @@ +# Eclass for optional Java packages +# +# Copyright (c) 2004-2005, Thomas Matthijs +# Copyright (c) 2004-2005, Gentoo Foundation +# +# Licensed under the GNU General Public License, v2 +# +# Major changes: +# 20070805: +# Removed phase hooks because Portage does proper env saving now. +# +# +# $Header: /var/cvsroot/gentoo-x86/eclass/java-pkg-opt-2.eclass,v 1.14 2010/02/01 09:38:44 caster Exp $ + +inherit java-utils-2 + +# ------------------------------------------------------------------------------ +# @eclass-begin +# @eclass-summary Eclass for packages with optional Java support +# ------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ +# @ebuild-variable JAVA_PKG_OPT_USE +# +# USE flag to control if optional Java stuff is build. Defaults to 'java'. 
+# ------------------------------------------------------------------------------ +JAVA_PKG_OPT_USE=${JAVA_PKG_OPT_USE:-java} + +# ------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ +DEPEND="${JAVA_PKG_OPT_USE}? ( ${JAVA_PKG_E_DEPEND} )" +RDEPEND="${DEPEND}" + +# ------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ +# See java-pkg-2.eclass for JAVA_PKG_IUSE documentation +IUSE="${JAVA_PKG_IUSE} ${JAVA_PKG_OPT_USE} gcj multislot" + +case "${EAPI:-0}" in + 0|1) EXPORT_FUNCTIONS pkg_setup pkg_preinst ;; + *) EXPORT_FUNCTIONS pkg_setup src_prepare pkg_preinst ;; +esac + +# ------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ +java-pkg-opt-2_pkg_setup() { + use ${JAVA_PKG_OPT_USE} && java-pkg_init +} + +# ------------------------------------------------------------------------------ +# @eclass-src_prepare +# +# wrapper for java-utils-2_src_prepare +# ------------------------------------------------------------------------------ +java-pkg-opt-2_src_prepare() { + use ${JAVA_PKG_OPT_USE} && java-utils-2_src_prepare +} + +# ------------------------------------------------------------------------------ +# @eclass-pkg_preinst +# +# wrapper for java-utils-2_pkg_preinst +# ------------------------------------------------------------------------------ +java-pkg-opt-2_pkg_preinst() { + use ${JAVA_PKG_OPT_USE} && java-utils-2_pkg_preinst +} + +# ------------------------------------------------------------------------------ +# @eclass-pkg_postinst +# ------------------------------------------------------------------------------ +pre_pkg_postinst() { + java-pkg_reg-cachejar_ +} diff --git a/eclass/java-utils-2.eclass b/eclass/java-utils-2.eclass new file mode 
100644
index 0000000..7f88c9b
--- /dev/null
+++ b/eclass/java-utils-2.eclass
@@ -0,0 +1,3489 @@
# Base eclass for Java packages
#
# Copyright (c) 2004-2005, Thomas Matthijs
# Copyright (c) 2004, Karl Trygve Kalleberg
# Copyright (c) 2004-2005, Gentoo Foundation
#
# Licensed under the GNU General Public License, v2
#
# $Header: /var/cvsroot/gentoo-x86/eclass/java-utils-2.eclass,v 1.135 2010/04/28 19:40:40 caster Exp $

# -----------------------------------------------------------------------------
# @eclass-begin
# @eclass-shortdesc Java Utility eclass
# @eclass-maintainer java@gentoo.org
#
# This eclass provides functionality which is used by
# java-pkg.eclass and java-pkg-opt.eclass as well as from ebuilds.
#
# @warning
# You probably don't want to inherit this directly from an ebuild. Instead,
# you should inherit java-ant for Ant-based Java packages, java-pkg for other
# Java packages, or java-pkg-opt for packages that have optional Java support.
# -----------------------------------------------------------------------------

inherit eutils versionator flag-o-matic check-reqs multilib

IUSE="elibc_FreeBSD"

# -----------------------------------------------------------------------------
# @section-begin variables
# @section-title Variables
#
# Summary of variables which control the behavior of building Java packages.
# -----------------------------------------------------------------------------

# Make sure we use java-config-2
export WANT_JAVA_CONFIG="2"

# -----------------------------------------------------------------------------
# @variable-external WANT_ANT_TASKS
# @variable-default ""
#
# An $IFS separated list of ant tasks. An ebuild can set this variable before
# inheriting java-ant-2 to declare the ANT_TASKS it needs; the list is
# automatically translated into DEPEND and into the ANT_TASKS variable.
# JAVA_PKG_FORCE_ANT_TASKS can override ANT_TASKS set by WANT_ANT_TASKS,
# but not the DEPEND, due to metadata caching.
# Ebuilds that need to depend conditionally on certain tasks, or specify them
# differently for different eant calls, can't use this simplified approach.
# Only unversioned ant-* atoms may be listed.
#
# @example WANT_ANT_TASKS="ant-junit ant-trax"
#
# @seealso JAVA_PKG_FORCE_ANT_TASKS
# -----------------------------------------------------------------------------
#WANT_ANT_TASKS

# -----------------------------------------------------------------------------
# @variable-internal JAVA_PKG_PORTAGE_DEP
#
# The version of portage we need to function properly. Previously it was
# portage with phase hooks support, but now we require a version with proper
# env saving. EAPI >= 2 guarantees new enough portage, so the dep is only
# added for EAPI 0/1.
# -----------------------------------------------------------------------------
hasq "${EAPI}" 0 1 && JAVA_PKG_PORTAGE_DEP=">=sys-apps/portage-2.1.2.7"

# -----------------------------------------------------------------------------
# @variable-internal JAVA_PKG_E_DEPEND
#
# Convenience variable used by the other java eclasses: the version of
# java-config we want (latest stable, so ebuilds can use new features without
# depending on specific versions). The [lib32?] USE dep is a multilib-overlay
# extension. If the ebuild declares USE=source, app-arch/zip is needed to
# build the source zip.
# -----------------------------------------------------------------------------
JAVA_PKG_E_DEPEND=">=dev-java/java-config-2.1.9-r1[lib32?] ${JAVA_PKG_PORTAGE_DEP}"
hasq source ${JAVA_PKG_IUSE} && JAVA_PKG_E_DEPEND="${JAVA_PKG_E_DEPEND} source? ( app-arch/zip )"

# -----------------------------------------------------------------------------
# @variable-preinherit JAVA_PKG_WANT_BOOTCLASSPATH
#
# The version of bootclasspath the package needs to work. Translates to a
# proper dependency.
# The bootclasspath has to be obtained by java-ant_rewrite-bootclasspath.
# -----------------------------------------------------------------------------

if [[ -n "${JAVA_PKG_WANT_BOOTCLASSPATH}" ]]; then
	# Only the 1.5 bootclasspath (gnu-classpath 0.98) is supported.
	if [[ "${JAVA_PKG_WANT_BOOTCLASSPATH}" == "1.5" ]]; then
		JAVA_PKG_E_DEPEND="${JAVA_PKG_E_DEPEND} >=dev-java/gnu-classpath-0.98-r1:0.98"
	else
		eerror "Unknown value of JAVA_PKG_WANT_BOOTCLASSPATH"
		# since die in global scope doesn't work, this will make repoman fail
		JAVA_PKG_E_DEPEND="${JAVA_PKG_E_DEPEND} BAD_JAVA_PKG_WANT_BOOTCLASSPATH"
	fi
fi

# -----------------------------------------------------------------------------
# @variable-external JAVA_PKG_ALLOW_VM_CHANGE
# @variable-default yes
#
# Allow this eclass to change the active VM?
# If your system VM isn't sufficient for the package, the build will fail
# when this is "no".
# @note This is useful for testing specific VMs.
# -----------------------------------------------------------------------------
JAVA_PKG_ALLOW_VM_CHANGE=${JAVA_PKG_ALLOW_VM_CHANGE:="yes"}

# -----------------------------------------------------------------------------
# @variable-external JAVA_PKG_FORCE_VM
#
# Explicitly set a particular VM to use. If it's not valid, it falls back to
# whatever /etc/java-config-2/build/jdk.conf would elect to use.
#
# Should only be used for testing and debugging.
#
# @example Use sun-jdk-1.5 to emerge foo
#	JAVA_PKG_FORCE_VM=sun-jdk-1.5 emerge foo
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# @variable-external JAVA_PKG_WANT_SOURCE
#
# Specify a specific VM version to use for -source.
# Normally this is determined from DEPEND; see java-pkg_get-source below.
#
# Should only be used for testing and debugging.
#
# @seealso java-pkg_get-source
#
# @example Use 1.4 source to emerge baz
#	JAVA_PKG_WANT_SOURCE=1.4 emerge baz
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# @variable-external JAVA_PKG_WANT_TARGET
#
# Same as JAVA_PKG_WANT_SOURCE above, but for -target.
# See java-pkg_get-target function below.
#
# Should only be used for testing and debugging.
#
# @seealso java-pkg_get-target
#
# @example emerge bar to be compatible with 1.3
#	JAVA_PKG_WANT_TARGET=1.3 emerge bar
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# @variable-internal JAVA_PKG_COMPILER_DIR
# @default /usr/share/java-config-2/compiler
#
# Directory where compiler settings are saved, without trailing slash.
# Probably shouldn't touch this variable.
# -----------------------------------------------------------------------------
JAVA_PKG_COMPILER_DIR=${JAVA_PKG_COMPILER_DIR:="/usr/share/java-config-2/compiler"}

# -----------------------------------------------------------------------------
# @variable-internal JAVA_PKG_COMPILERS_CONF
# @variable-default /etc/java-config-2/build/compilers.conf
#
# Path to file containing information about which compiler to use.
# Can be overloaded, but should only be overloaded for testing.
# -----------------------------------------------------------------------------
JAVA_PKG_COMPILERS_CONF=${JAVA_PKG_COMPILERS_CONF:="/etc/java-config-2/build/compilers.conf"}

# -----------------------------------------------------------------------------
# @variable-external JAVA_PKG_FORCE_COMPILER
#
# Explicitly set a list of compilers to use. Normally read from
# JAVA_PKG_COMPILERS_CONF.
#
# @note This should only be used internally or for testing.
# @example Use jikes and javac, in that order
#	JAVA_PKG_FORCE_COMPILER="jikes javac"
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# @variable-external JAVA_PKG_FORCE_ANT_TASKS
#
# An $IFS separated list of ant tasks. Can be set in the environment before
# calling emerge/ebuild to override variables set in the ebuild, mainly for
# testing before putting the resulting (WANT_)ANT_TASKS into the ebuild.
# Affects only ANT_TASKS in eant() calls, not the dependencies specified in
# WANT_ANT_TASKS.
#
# @example JAVA_PKG_FORCE_ANT_TASKS="ant-junit ant-trax" \
#	ebuild foo.ebuild compile
#
# @seealso WANT_ANT_TASKS
# -----------------------------------------------------------------------------

# TODO document me
JAVA_PKG_QA_VIOLATIONS=0

# -----------------------------------------------------------------------------
# @variable-internal JAVA_PKG_NATIVE_CLASSMAP
#
# Explicitly set classmap.gcjdb database file.
# Set by java-pkg_native_set_env once the libgcj ABI is known.
# Previously it was externally configurable and defaulted to
# JAVA_PKG_NATIVE_CLASSMAP="/usr/share/java/classmap.gcjdb".
# -----------------------------------------------------------------------------
JAVA_PKG_NATIVE_CLASSMAP=""

# -----------------------------------------------------------------------------
# @variable-external JAVA_PKG_NATIVE_GCJ_PROFILE
#
# Preferred gcc-config profile for building native packages if the java-config
# system VM is _not_ gcj-jdk. If the selected java-config VM profile is
# gcj-jdk, this variable is unused and packages are built for the current VM.
#
# @example JAVA_PKG_NATIVE_GCJ_PROFILE="x86_64-pc-linux-gnu-4.3.3"
# @example JAVA_PKG_NATIVE_GCJ_PROFILE="highest"
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# @variable-internal JAVA_PKG_NATIVE_CACHE_FLAGS
#
# Required flags for building a native library with gcj.
# -----------------------------------------------------------------------------
JAVA_PKG_NATIVE_CACHE_FLAGS="-shared -Wl,-Bsymbolic -fPIC -findirect-dispatch -fjni"

# -----------------------------------------------------------------------------
# @variable-internal JAVA_PKG_NATIVE_BIN_FLAGS
#
# Required ldflags for a native binary; set by java-pkg_native_set_env.
# -----------------------------------------------------------------------------
JAVA_PKG_NATIVE_BIN_FLAGS=""

# -----------------------------------------------------------------------------
# @variable-internal JAVA_PKG_NATIVE_INC
#
# Include files (jar) used to compile native code.
# Generated by java-pkg_gen-native-cp.
# -----------------------------------------------------------------------------
JAVA_PKG_NATIVE_INC=""

# -----------------------------------------------------------------------------
# @variable-internal JAVA_PKG_NATIVE_LIB
#
# Library files (jar.so) used to link native code.
# Generated by java-pkg_gen-native-cp.
# -----------------------------------------------------------------------------
JAVA_PKG_NATIVE_LIB=""

# -----------------------------------------------------------------------------
# @variable-internal JAVA_PKG_NATIVE_SKIP
#
# Jar files that match a listed pattern will be skipped.
#
# @example
#	java-pkg_skip-cachejar org.eclipse.jdt.core_ org.eclipse.jdt.apt
#	java-pkg_skip-cachejar 2000 org.eclipse.jdt.ui_
#
# param $1 - optional: memory size to check
# param $@ - pattern of Jar files to skip
# -----------------------------------------------------------------------------
JAVA_PKG_NATIVE_SKIP=""

# -----------------------------------------------------------------------------
# @section-end variables
# -----------------------------------------------------------------------------


# -----------------------------------------------------------------------------
# @section-begin install
# @section-summary Install functions
#
# These are used to install Java-related things, such as jars, Javadocs, JNI
# libraries, etc.
# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# @ebuild-function java-pkg_doexamples
#
# Installs given arguments to /usr/share/doc/${PF}/examples
# If you give it only one parameter and it is a directory, it will install
# everything in that directory to the examples directory.
+# +# @example +# java-pkg_doexamples demo +# java-pkg_doexamples demo/* examples/* +# +# @param --subdir - If the examples need a certain directory structure +# @param $* - list of files to install +# ------------------------------------------------------------------------------ +java-pkg_doexamples() { + debug-print-function ${FUNCNAME} $* + + [[ ${#} -lt 1 ]] && die "At least one argument needed" + + java-pkg_check-phase install + + local dest=/usr/share/doc/${PF}/examples + if [[ ${1} == --subdir ]]; then + local dest=${dest}/${2} + dodir ${dest} + shift 2 + fi + + if [[ ${#} = 1 && -d ${1} ]]; then + ( # dont want to pollute calling env + insinto "${dest}" + doins -r ${1}/* + ) || die "Installing examples failed" + else + ( # dont want to pollute calling env + insinto "${dest}" + doins -r "$@" + ) || die "Installing examples failed" + fi +} + +# ----------------------------------------------------------------------------- +# @ebuild-function java-pkg_dojar +# +# Installs any number of jars. +# Jar's will be installed into /usr/share/${PN}(-${SLOT})/lib/ by default. +# You can use java-pkg_jarinto to change this path. +# You should never install a jar with a package version in the filename. +# Instead, use java-pkg_newjar defined below. 
+# +# @example +# java-pkg_dojar dist/${PN}.jar dist/${PN}-core.jar +# +# @param $* - list of jars to install +# ------------------------------------------------------------------------------ +java-pkg_dojar() { + debug-print-function ${FUNCNAME} $* + + [[ ${#} -lt 1 ]] && die "At least one argument needed" + + java-pkg_check-phase install + java-pkg_init_paths_ + + # Create JARDEST if it doesn't exist + dodir ${JAVA_PKG_JARDEST} + + local jar + # for each jar + for jar in "${@}"; do + local jar_basename=$(basename "${jar}") + + java-pkg_check-versioned-jar ${jar_basename} + + # check if it exists + if [[ -e "${jar}" ]] ; then + # Don't overwrite if jar has already been installed with the same + # name + local dest="${D}${JAVA_PKG_JARDEST}/${jar_basename}" + if [[ -e "${dest}" ]]; then + ewarn "Overwriting ${dest}" + fi + + # install it into JARDEST if it's a non-symlink + if [[ ! -L "${jar}" ]] ; then + #but first check class version when in strict mode. + is-java-strict && java-pkg_verify-classes "${jar}" + + INSDESTTREE="${JAVA_PKG_JARDEST}" \ + doins "${jar}" || die "failed to install ${jar}" + java-pkg_append_ JAVA_PKG_CLASSPATH "${JAVA_PKG_JARDEST}/${jar_basename}" + debug-print "installed ${jar} to ${D}${JAVA_PKG_JARDEST}" + # make a symlink to the original jar if it's symlink + else + # TODO use dosym, once we find something that could use it + # -nichoj + ln -s "$(readlink "${jar}")" "${D}${JAVA_PKG_JARDEST}/${jar_basename}" + debug-print "${jar} is a symlink, linking accordingly" + fi + else + die "${jar} does not exist" + fi + done + + java-pkg_do_write_ + java-pkg_cachejar_ +} + +# ------------------------------------------------------------------------------ +# @internal-function depend-java-query +# +# Wrapper for the depend-java-query binary to enable passing USE in env. +# Using env variables keeps this eclass working with java-config versions that +# do not handle use flags. 
+# ------------------------------------------------------------------------------ + +depend-java-query() { + # Used to have a which call here but it caused endless loops for some people + # that had some weird bashrc voodoo for which. + USE="${USE}" /usr/bin/depend-java-query "${@}" +} + +# ------------------------------------------------------------------------------ +# @ebuild-function java-pkg_regjar +# +# Records an already installed jar in the package.env +# This would mostly be used if the package has make or a custom script to +# install things. +# +# Example: +# java-pkg_regjar ${D}/opt/foo/lib/foo.jar +# +# WARNING: +# if you want to use shell expansion, you have to use ${D}/... as the for in +# this function will not be able to expand the path, here's an example: +# +# java-pkg_regjar /opt/my-java/lib/*.jar +# +# will not work, because: +# * the `for jar in "$@"` can't expand the path to jar file names, as they +# don't exist yet +# * all `if ...` inside for will fail - the file '/opt/my-java/lib/*.jar' +# doesn't exist +# +# you have to use it as: +# +# java-pkg_regjar ${D}/opt/my-java/lib/*.jar +# +# @param $@ - jars to record +# ------------------------------------------------------------------------------ +# TODO should we be making sure the jar is present on ${D} or wherever? 
java-pkg_regjar() {
	debug-print-function ${FUNCNAME} $*

	java-pkg_check-phase install

	[[ ${#} -lt 1 ]] && die "at least one argument needed"

	java-pkg_init_paths_

	local jar
	for jar in "${@}"; do
		# TODO use java-pkg_check-versioned-jar
		if [[ ! -e "${jar}" && ! -e "${D}${jar}" ]]; then
			if [[ ${jar} = *\** ]]; then
				eerror "The argument ${jar} to ${FUNCNAME}"
				eerror "has * in it. If you want it to glob in"
				eerror '${D} add ${D} to the argument.'
			fi
			debug-print "${jar} or ${D}${jar} not found"
			die "${jar} does not exist"
		fi

		[[ -d "${jar}" || -d "${D}${jar}" ]] \
			&& die "Called ${FUNCNAME} on a directory $*"

		# check that class version is correct when in strict mode
		is-java-strict && java-pkg_verify-classes "${jar}"

		# nelchael: strip ${D} so that a call like
		#   java-pkg_regjar ${D}/opt/java/*.jar
		# never records image paths in package.env
		java-pkg_append_ JAVA_PKG_CLASSPATH "${jar#${D}}"
	done

	java-pkg_do_write_
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_newjar
#
# Installs a jar under a new name.
#
# @example: install a versioned jar without the version
#	java-pkg_newjar dist/${P}.jar ${PN}.jar
#
# @param $1 - jar to install
# @param $2 - new name for jar - defaults to ${PN}.jar if not specified
# ------------------------------------------------------------------------------
java-pkg_newjar() {
	debug-print-function ${FUNCNAME} $*

	local original_jar="${1}"
	local new_jar="${2:-${PN}.jar}"
	local new_jar_dest="${T}/${new_jar}"

	[[ -z ${original_jar} ]] && die "Must specify a jar to install"
	[[ ! -f ${original_jar} ]] \
		&& die "${original_jar} does not exist or is not a file!"

	# Stage a renamed copy in ${T} and install it through dojar so that
	# classpath registration and cache handling apply normally.
	rm -f "${new_jar_dest}" || die "Failed to remove ${new_jar_dest}"
	cp "${original_jar}" "${new_jar_dest}" \
		|| die "Failed to copy ${original_jar} to ${new_jar_dest}"
	java-pkg_dojar "${new_jar_dest}"
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_addcp
#
# Add something to the package's classpath. For jars, you should use dojar,
# newjar, or regjar. This is typically used to add directories to the classpath.
#
# @param $@ - value to append to JAVA_PKG_CLASSPATH
# ------------------------------------------------------------------------------
java-pkg_addcp() {
	java-pkg_append_ JAVA_PKG_CLASSPATH "${@}"
	java-pkg_do_write_
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_doso
#
# Installs any number of JNI libraries.
# They will be installed into /usr/lib by default, but java-pkg_sointo
# can be used to change this path.
#
# Example:
#	java-pkg_doso *.so
#
# @param $@ - JNI libraries to install
# ------------------------------------------------------------------------------
java-pkg_doso() {
	debug-print-function ${FUNCNAME} $*

	java-pkg_check-phase install

	[[ ${#} -lt 1 ]] && die "${FUNCNAME} requires at least one argument"

	java-pkg_init_paths_

	local lib
	for lib in "$@" ; do
		[[ -e "${lib}" ]] || die "${lib} does not exist"

		if [[ -L "${lib}" ]] ; then
			# symlink: recreate it pointing at the original target
			dosym "$(readlink "${lib}")" "${JAVA_PKG_LIBDEST}/${lib##*/}"
			debug-print "${lib} is a symlink, linking accordantly"
		else
			# regular file: install and record the library directory
			INSDESTTREE="${JAVA_PKG_LIBDEST}" \
			INSOPTIONS="${LIBOPTIONS}" \
				doins "${lib}" || die "failed to install ${lib}"
			java-pkg_append_ JAVA_PKG_LIBRARY "${JAVA_PKG_LIBDEST}"
			debug-print "Installing ${lib} to ${JAVA_PKG_LIBDEST}"
		fi
	done

	java-pkg_do_write_
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_regso
#
# Registers an already installed JNI library in package.env.
#
# Example:
#	java-pkg_regso *.so /path/*.so
#
# @param $@ - JNI libraries to register
# ------------------------------------------------------------------------------
java-pkg_regso() {
	debug-print-function ${FUNCNAME} $*

	java-pkg_check-phase install

	[[ ${#} -lt 1 ]] && die "${FUNCNAME} requires at least one argument"

	java-pkg_init_paths_

	local lib target_dir
	for lib in "$@" ; do
		if [[ -e "${lib}" ]] ; then
			# absolute path: resolve its directory and strip ${D}
			target_dir="$(java-pkg_expand_dir_ ${lib})"
			java-pkg_append_ JAVA_PKG_LIBRARY "/${target_dir#${D}}"
		elif [[ -e "${D}${lib}" ]]; then
			# path relative to ${D}
			target_dir="$(java-pkg_expand_dir_ ${D}${lib})"
			java-pkg_append_ JAVA_PKG_LIBRARY "${target_dir}"
		else
			die "${lib} does not exist"
		fi
	done

	java-pkg_do_write_
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_jarinto
#
# Changes the path jars are installed into.
#
# @param $1 - new location to install jars into.
+# ----------------------------------------------------------------------------- +java-pkg_jarinto() { + debug-print-function ${FUNCNAME} $* + + JAVA_PKG_JARDEST="${1}" +} + +# ------------------------------------------------------------------------------ +# @ebuild-function java-pkg_sointo +# +# Changes the path that JNI libraries are installed into. +# +# @param $1 - new location to install JNI libraries into. +# ------------------------------------------------------------------------------ +java-pkg_sointo() { + debug-print-function ${FUNCNAME} $* + + JAVA_PKG_LIBDEST="${1}" +} + +# ------------------------------------------------------------------------------ +# @ebuild-function java-pkg_dohtml +# +# Install Javadoc HTML documentation +# +# @example +# java-pkg_dohtml dist/docs/ +# +# ------------------------------------------------------------------------------ +java-pkg_dohtml() { + debug-print-function ${FUNCNAME} $* + + [[ ${#} -lt 1 ]] && die "At least one argument required for ${FUNCNAME}" + + # from /usr/lib/portage/bin/dohtml -h + # -f Set list of allowed extensionless file names. + dohtml -f package-list "$@" + + # this probably shouldn't be here but it provides + # a reasonable way to catch # docs for all of the + # old ebuilds. + java-pkg_recordjavadoc +} + +# ------------------------------------------------------------------------------ +# @ebuild-function java-pkg_dojavadoc +# +# Installs javadoc documentation. This should be controlled by the doc use flag. +# +# @param $1: optional --symlink creates to symlink like this for html +# documentation bundles. +# @param $2: - The javadoc root directory. 
+# +# @example: +# java-pkg_dojavadoc docs/api +# java-pkg_dojavadoc --symlink apidocs docs/api +# +# ------------------------------------------------------------------------------ +java-pkg_dojavadoc() { + debug-print-function ${FUNCNAME} $* + + # For html documentation bundles that link to Javadoc + local symlink + if [[ ${1} = --symlink ]]; then + symlink=${2} + shift 2 + fi + + local dir="$1" + local dest=/usr/share/doc/${PF}/html + + # QA checks + + java-pkg_check-phase install + + [[ -z "${dir}" ]] && die "Must specify a directory!" + [[ ! -d "${dir}" ]] && die "${dir} does not exist, or isn't a directory!" + if [[ ! -e "${dir}/index.html" ]]; then + local msg="No index.html in javadoc directory" + ewarn "${msg}" + is-java-strict && die "${msg}" + fi + + if [[ -e ${D}/${dest}/api ]]; then + eerror "${dest} already exists. Will not overwrite." + die "${dest}" + fi + + # Renaming to match our directory layout + + local dir_to_install="${dir}" + if [[ "$(basename "${dir}")" != "api" ]]; then + dir_to_install="${T}/api" + # TODO use doins + cp -r "${dir}" "${dir_to_install}" || die "cp failed" + fi + + # Actual installation + + java-pkg_dohtml -r "${dir_to_install}" + + # Let's make a symlink to the directory we have everything else under + dosym ${dest}/api "${JAVA_PKG_SHAREPATH}/api" || die + + if [[ ${symlink} ]]; then + debug-print "symlinking ${dest}/{api,${symlink}}" + dosym ${dest}/{api,${symlink}} || die + fi +} + +# ------------------------------------------------------------------------------ +# @ebuild-function java-pkg_dosrc +# +# Installs a zip containing the source for a package, so it can used in +# from IDEs like eclipse and netbeans. +# +# Ebuild needs to DEPEND on app-arch/zip to use this. +# +# It also should be controlled by USE=source. 
+# +# @example: +# java-pkg_dosrc src/* +# +# ------------------------------------------------------------------------------ +# TODO change so it the arguments it takes are the base directories containing +# source -nichoj +# TODO should we be able to handle multiple calls to dosrc? -nichoj +# TODO maybe we can take an existing zip/jar? -nichoj +# FIXME apparently this fails if you give it an empty directories +java-pkg_dosrc() { + debug-print-function ${FUNCNAME} $* + + [ ${#} -lt 1 ] && die "At least one argument needed" + + java-pkg_check-phase install + + [[ ${#} -lt 1 ]] && die "At least one argument needed" + + if ! [[ ${DEPEND} = *app-arch/zip* ]]; then + local msg="${FUNCNAME} called without app-arch/zip in DEPEND" + java-pkg_announce-qa-violation ${msg} + fi + + java-pkg_init_paths_ + + local zip_name="${PN}-src.zip" + local zip_path="${T}/${zip_name}" + local dir + for dir in "${@}"; do + local dir_parent=$(dirname "${dir}") + local dir_name=$(basename "${dir}") + pushd ${dir_parent} > /dev/null || die "problem entering ${dir_parent}" + zip -q -r ${zip_path} ${dir_name} -i '*.java' + local result=$? + # 12 means zip has nothing to do + if [[ ${result} != 12 && ${result} != 0 ]]; then + die "failed to zip ${dir_name}" + fi + popd >/dev/null + done + + # Install the zip + INSDESTTREE=${JAVA_PKG_SOURCESPATH} \ + doins ${zip_path} || die "Failed to install source" + + JAVA_SOURCES="${JAVA_PKG_SOURCESPATH}/${zip_name}" + java-pkg_do_write_ +} + +# ------------------------------------------------------------------------------ +# @ebuild-function java-pkg_dolauncher +# +# Make a wrapper script to lauch/start this package +# If necessary, the wrapper will switch to the appropriate VM. +# +# Can be called without parameters if the package installs only one jar +# that has the Main-class attribute set. The wrapper will be named ${PN}. 
#
# @param $1 - filename of launcher to create
# @param $2 - options, as follows:
#	--main the.main.class.too.start
#	--jar /the/jar/too/launch.jar or just .jar
#	--java_args 'Extra arguments to pass to java'
#	--pkg_args 'Extra arguments to pass to the package'
#	--pwd Directory the launcher changes to before executing java
#	-into Directory to install the launcher to, instead of /usr/bin
#	-pre Prepend contents of this file to the launcher
# ------------------------------------------------------------------------------
java-pkg_dolauncher() {
	debug-print-function ${FUNCNAME} $*

	java-pkg_check-phase install
	java-pkg_init_paths_

	# Default the launcher name to ${PN} when called without arguments.
	if [[ ${#} = 0 ]]; then
		local name="${PN}"
	else
		local name="${1}"
		shift
	fi

	# TODO rename to launcher
	local target="${T}/${name}"
	local var_tmp="${T}/launcher_variables_tmp"
	local target_dir pre

	# Process the rest of the arguments. "--foo value" pairs are recorded
	# as gjl_foo variables for the launcher script; -into and -pre are
	# consumed locally.
	while [[ -n "${1}" && -n "${2}" ]]; do
		local var="${1}" value="${2}"
		if [[ "${var:0:2}" == "--" ]]; then
			local var=${var:2}
			echo "gjl_${var}=\"${value}\"" >> "${var_tmp}"
			local gjl_${var}="${value}"
		elif [[ "${var}" == "-into" ]]; then
			target_dir="${value}"
		elif [[ "${var}" == "-pre" ]]; then
			pre="${value}"
		fi
		shift 2
	done

	# If neither --jar nor --main was given, check whether the package
	# installs exactly one jar (classpath has no ':' and ends in .jar)
	# and use that.
	if [[ -z "${gjl_jar}" && -z "${gjl_main}" ]]; then
		local cp="${JAVA_PKG_CLASSPATH}"
		if [[ "${cp/:}" = "${cp}" && "${cp%.jar}" != "${cp}" ]]; then
			echo "gjl_jar=\"${JAVA_PKG_CLASSPATH}\"" >> "${var_tmp}"
		else
			local msg="Not enough information to create a launcher given."
			msg="${msg} Please give --jar or --main argument to ${FUNCNAME}."
			die "${msg}"
		fi
	fi

	# Write the actual script: shebang, optional -pre payload, package
	# name, collected gjl_* variables, then the common launcher logic.
	echo "#!/bin/bash" > "${target}"
	if [[ -n "${pre}" ]]; then
		if [[ -f "${pre}" ]]; then
			cat "${pre}" >> "${target}"
		else
			die "-pre specified file '${pre}' does not exist"
		fi
	fi
	echo "gjl_package=${JAVA_PKG_NAME}" >> "${target}"
	cat "${var_tmp}" >> "${target}"
	rm -f "${var_tmp}"
	echo "source /usr/share/java-config-2/launcher/launcher.bash" >> "${target}"

	if [[ -n "${target_dir}" ]]; then
		DESTTREE="${target_dir}" dobin "${target}"
		local ret=$?
		return ${ret}
	else
		dobin "${target}"
	fi
}

# ------------------------------------------------------------------------------
# Install war files.
# TODO document
# ------------------------------------------------------------------------------
java-pkg_dowar() {
	debug-print-function ${FUNCNAME} $*

	# Check for arguments
	[[ ${#} -lt 1 ]] && die "At least one argument needed"
	java-pkg_check-phase install

	java-pkg_init_paths_

	local war
	# "$@" instead of unquoted $* so paths containing whitespace survive,
	# matching the argument handling of the other install functions.
	for war in "$@" ; do
		local warpath
		# TODO evaluate if we want to handle symlinks differently -nichoj
		# Check for symlink
		if [[ -L "${war}" ]] ; then
			cp "${war}" "${T}"
			# Fixed: a '/' separator is required between ${T} and the
			# basename, otherwise the copied war is never found.
			warpath="${T}/$(basename "${war}")"
		# Check for directory
		# TODO evaluate if we want to handle directories differently -nichoj
		elif [[ -d "${war}" ]] ; then
			echo "dowar: warning, skipping directory ${war}"
			continue
		else
			warpath="${war}"
		fi

		# Install those files like you mean it
		INSOPTIONS="-m 0644" \
		INSDESTTREE=${JAVA_PKG_WARDEST} \
			doins ${warpath}
	done
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_recordjavadoc
# Scan for JavaDocs, and record their existence in the package.env file
#
# TODO make sure this in the proper section
# ------------------------------------------------------------------------------
java-pkg_recordjavadoc()
{
	debug-print-function ${FUNCNAME} $*
	# the find statement is important
	# as some packages include multiple trees of javadoc
	JAVADOC_PATH="$(find ${D}/usr/share/doc/ -name allclasses-frame.html -printf '%h:')"
	# remove $D - TODO: check this is ok with all cases of the above
	JAVADOC_PATH="${JAVADOC_PATH//${D}}"
	if [[ -n "${JAVADOC_PATH}" ]] ; then
		debug-print "javadocs found in ${JAVADOC_PATH%:}"
		java-pkg_do_write_
	else
		debug-print "No javadocs found"
	fi
}

# ------------------------------------------------------------------------------
# @section-end install
# ------------------------------------------------------------------------------

# ------------------------------------------------------------------------------
# @begin-section query
# Use these to build the classpath for building a package.
# ------------------------------------------------------------------------------

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_jar-from
#
# Makes a symlink to a jar from a certain package.
# A lot of java packages include dependencies in a lib/ directory.
# You can use this function to replace these bundled dependencies.
# The dependency is recorded into package.env DEPEND line, unless "--build-only"
# is passed as the very first argument, for jars that have to be present only
# at build time and are not needed on runtime (junit testing etc).
#
# Example: get all jars from xerces slot 2
#	java-pkg_jar-from xerces-2
# Example: get a specific jar from xerces slot 2
#	java-pkg_jar-from xerces-2 xml-apis.jar
# Example: get a specific jar from xerces slot 2, and name it differently
#	java-pkg_jar-from xerces-2 xml-apis.jar xml.jar
# Example: get junit.jar which is needed only for building
#	java-pkg_jar-from --build-only junit junit.jar
#
# @param $opt
#	--build-only - makes the jar(s) not added into package.env DEPEND line.
#	(assumed automatically when called inside src_test)
# --with-dependencies - get jars also from requested package's dependencies
#	transitively.
# --virtual - Packages passed to this function are to be handled as virtuals
#	and will not have individual jar dependencies recorded.
# --into $dir - symlink jar(s) into $dir (must exist) instead of .
# @param $1 - Package to get jars from, or comma-separated list of packages in
#	case other parameters are not used.
# @param $2 - jar from package. If not specified, all jars will be used.
# @param $3 - When a single jar is specified, destination filename of the
#	symlink. Defaults to the name of the jar.
# ------------------------------------------------------------------------------
# TODO could probably be cleaned up a little
java-pkg_jar-from() {
	debug-print-function ${FUNCNAME} $*

	local build_only=""
	local destdir="."
	local deep=""
	local virtual=""
	local record_jar=""

	# Inside src_test we never record runtime dependencies.
	[[ "${EBUILD_PHASE}" == "test" ]] && build_only="build"

	# Consume leading --* options.
	while [[ "${1}" == --* ]]; do
		if [[ "${1}" = "--build-only" ]]; then
			build_only="build"
		elif [[ "${1}" = "--with-dependencies" ]]; then
			deep="--with-dependencies"
		elif [[ "${1}" = "--virtual" ]]; then
			virtual="true"
		elif [[ "${1}" = "--into" ]]; then
			destdir="${2}"
			shift
		else
			die "java-pkg_jar-from called with unknown parameter: ${1}"
		fi
		shift
	done

	local target_pkg="${1}" target_jar="${2}" destjar="${3}"

	[[ -z ${target_pkg} ]] && die "Must specify a package"

	# EAPI 1 has no ':' in atoms; SLOTted packages are encoded with '-'.
	if [[ "${EAPI}" == "1" ]]; then
		target_pkg="${target_pkg//:/-}"
	fi

	# default destjar to the target jar
	[[ -z "${destjar}" ]] && destjar="${target_jar}"

	local error_msg="There was a problem getting the classpath for ${target_pkg}."
	local classpath
	classpath="$(java-config ${deep} --classpath=${target_pkg})"
	[[ $? != 0 ]] && die ${error_msg}

	# When we have commas this function is called to bring jars from multiple
	# packages. This affects recording of dependencies performed later
	# which expects one package only, so we do it here.
	if [[ ${target_pkg} = *,* ]]; then
		local pkg
		for pkg in ${target_pkg//,/ }; do
			java-pkg_ensure-dep "${build_only}" "${pkg}"
			[[ -z "${build_only}" ]] && java-pkg_record-jar_ "${pkg}"
		done
		# setting this disables further record-jar_ calls later
		record_jar="true"
	else
		java-pkg_ensure-dep "${build_only}" "${target_pkg}"
	fi

	# Record the entire virtual as a dependency so that
	# no jars are missed.
	if [[ -z "${build_only}" && -n "${virtual}" ]]; then
		java-pkg_record-jar_ "${target_pkg}"
		# setting this disables further record-jar_ calls later
		record_jar="true"
	fi

	pushd ${destdir} > /dev/null \
		|| die "failed to change directory to ${destdir}"

	local jar
	for jar in ${classpath//:/ }; do
		local jar_name=$(basename "${jar}")
		if [[ ! -f "${jar}" ]] ; then
			debug-print "${jar} from ${target_pkg} does not exist"
			die "Installation problems with jars in ${target_pkg} - is it installed?"
		fi
		# If no specific target jar was indicated, link it
		if [[ -z "${target_jar}" ]] ; then
			# Bugfix: remove a stale file named after the link we are about to
			# create. The old code tested ${target_jar}, which is always empty
			# in this branch, so the stale-file check never fired.
			[[ -f "${jar_name}" ]] && rm "${jar_name}"
			# Single-argument ln creates ${jar_name} in the current directory.
			ln -snf "${jar}" \
				|| die "Failed to make symlink from ${jar} to ${jar_name}"
			if [[ -z "${record_jar}" ]]; then
				if [[ -z "${build_only}" ]]; then
					java-pkg_record-jar_ "${target_pkg}" "${jar}"
				else
					java-pkg_record-jar_ --build-only "${target_pkg}" "${jar}"
				fi
			fi
		# otherwise, if the current jar is the target jar, link it
		elif [[ "${jar_name}" == "${target_jar}" ]] ; then
			[[ -f "${destjar}" ]] && rm "${destjar}"
			ln -snf "${jar}" "${destjar}" \
				|| die "Failed to make symlink from ${jar} to ${destjar}"
			if [[ -z "${record_jar}" ]]; then
				if [[ -z "${build_only}" ]]; then
					java-pkg_record-jar_ "${target_pkg}" "${jar}"
				else
					# Bugfix: record the providing package, not the jar name
					# (was: --build-only "${target_jar}").
					java-pkg_record-jar_ --build-only "${target_pkg}" "${jar}"
				fi
			fi
			popd > /dev/null
			return 0
		fi
	done
	popd > /dev/null
	# if no target was specified, we're ok
	if [[ -z "${target_jar}" ]] ; then
		return 0
	# otherwise, die bitterly
	else
		die "Failed to find ${target_jar:-jar} in ${target_pkg}"
	fi
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_jarfrom
#
# See java-pkg_jar-from
# ------------------------------------------------------------------------------
java-pkg_jarfrom() {
	java-pkg_jar-from "$@"
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_getjars
#
# Get the classpath provided by any number of packages
# Among other things, this can be passed to 'javac -classpath' or 'ant -lib'.
# The providing packages are recorded as dependencies into package.env DEPEND
# line, unless "--build-only" is passed as the very first argument, for jars
# that have to be present only at build time and are not needed on runtime
# (junit testing etc).
#
# Example: Get the classpath for xerces-2 and xalan,
# java-pkg_getjars xerces-2,xalan
# Example Return:
# /usr/share/xerces-2/lib/xml-apis.jar:/usr/share/xerces-2/lib/xmlParserAPIs.jar:/usr/share/xalan/lib/xalan.jar
#
# @param $opt
# --build-only - makes the jar(s) not added into package.env DEPEND line.
#	(assumed automatically when called inside src_test)
# --with-dependencies - get jars also from requested package's dependencies
#	transitively.
# @param $1 - list of packages to get jars from
#	(passed to java-config --classpath)
# ------------------------------------------------------------------------------
java-pkg_getjars() {
	debug-print-function ${FUNCNAME} $*

	local build_only=""
	local deep=""

	# Inside src_test we never record runtime dependencies.
	[[ "${EBUILD_PHASE}" == "test" ]] && build_only="build"

	while [[ "${1}" == --* ]]; do
		if [[ "${1}" = "--build-only" ]]; then
			build_only="build"
		elif [[ "${1}" = "--with-dependencies" ]]; then
			deep="--with-dependencies"
		else
			# Bugfix: the error message used to blame java-pkg_jar-from.
			die "java-pkg_getjars called with unknown parameter: ${1}"
		fi
		shift
	done

	[[ ${#} -ne 1 ]] && die "${FUNCNAME} takes only one argument besides --*"

	local pkgs="${1}"

	# EAPI 1 has no ':' in atoms; SLOTted packages are encoded with '-'.
	if [[ "${EAPI}" == "1" ]]; then
		pkgs="${pkgs//:/-}"
	fi

	# Declare locals so we do not leak variables into the ebuild environment.
	local pkg jars
	jars="$(java-config ${deep} --classpath=${pkgs})"
	[[ $? != 0 ]] && die "java-config --classpath=${pkgs} failed"
	debug-print "${pkgs}:${jars}"

	# First make sure every requested package is a declared dependency ...
	for pkg in ${pkgs//,/ }; do
		java-pkg_ensure-dep "${build_only}" "${pkg}"
	done

	# ... then record each one into package.env.
	for pkg in ${pkgs//,/ }; do
		if [[ -z "${build_only}" ]]; then
			java-pkg_record-jar_ "${pkg}"
		else
			java-pkg_record-jar_ --build-only "${pkg}"
		fi
	done

	echo "${jars}"
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_getjar
#
# Get the filename of a single jar from a package
# The providing package is recorded as runtime dependency into package.env
# DEPEND line, unless "--build-only" is passed as the very first argument, for
# jars that have to be present only at build time and are not needed on runtime
# (junit testing etc).
#
# @example
# java-pkg_getjar xerces-2 xml-apis.jar
# @example-return
# /usr/share/xerces-2/lib/xml-apis.jar
#
# @param $opt
# --build-only - makes the jar not added into package.env DEPEND line.
# --virtual - Packages passed to this function are to be handled as virtuals
#	and will not have individual jar dependencies recorded.
# @param $1 - package to use
# @param $2 - jar to get
# ------------------------------------------------------------------------------
java-pkg_getjar() {
	debug-print-function ${FUNCNAME} $*

	local build_only=""
	local virtual=""
	local record_jar=""

	# Inside src_test we never record runtime dependencies.
	[[ "${EBUILD_PHASE}" == "test" ]] && build_only="build"

	while [[ "${1}" == --* ]]; do
		if [[ "${1}" = "--build-only" ]]; then
			build_only="build"
		elif [[ "${1}" == "--virtual" ]]; then
			virtual="true"
		else
			die "java-pkg_getjar called with unknown parameter: ${1}"
		fi
		shift
	done

	[[ ${#} -ne 2 ]] && die "${FUNCNAME} takes only two arguments besides --*"

	local pkg="${1}" target_jar="${2}" jar

	# EAPI 1 has no ':' in atoms; SLOTted packages are encoded with '-'.
	if [[ "${EAPI}" == "1" ]]; then
		pkg="${pkg//:/-}"
	fi

	[[ -z ${pkg} ]] && die "Must specify package to get a jar from"
	[[ -z ${target_jar} ]] && die "Must specify jar to get"

	local error_msg="Could not find classpath for ${pkg}. Are you sure its installed?"
	local classpath
	classpath=$(java-config --classpath=${pkg})
	[[ $? != 0 ]] && die ${error_msg}

	java-pkg_ensure-dep "${build_only}" "${pkg}"

	# Record the package(Virtual) as a dependency and then set record_jar
	# so that individual jars are not recorded.
	if [[ -n "${virtual}" ]]; then
		if [[ -z "${build_only}" ]]; then
			java-pkg_record-jar_ "${pkg}"
		else
			java-pkg_record-jar_ --build-only "${pkg}"
		fi
		record_jar="true"
	fi

	for jar in ${classpath//:/ }; do
		if [[ ! -f "${jar}" ]] ; then
			die "Installation problem with jar ${jar} in ${pkg} - is it installed?"
		fi

		if [[ "$(basename ${jar})" == "${target_jar}" ]] ; then
			# Only record jars that aren't build-only
			if [[ -z "${record_jar}" ]]; then
				if [[ -z "${build_only}" ]]; then
					java-pkg_record-jar_ "${pkg}" "${jar}"
				else
					java-pkg_record-jar_ --build-only "${pkg}" "${jar}"
				fi
			fi
			echo "${jar}"
			return 0
		fi
	done

	# die never returns, so the old trailing 'return 1' was unreachable
	# dead code and has been dropped.
	die "Could not find ${target_jar} in ${pkg}"
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_register-dependency
#
# Registers runtime dependency on a package, list of packages, or a single jar
# from a package, into package.env DEPEND line. Can only be called in
# src_install phase.
# Intended for binary packages where you don't need to symlink the jars or get
# their classpath during build. As such, the dependencies only need to be
# specified in ebuild's RDEPEND, and should be omitted in DEPEND.
#
# @param $1 - comma-separated list of packages, or a single package
# @param $2 - if param $1 is a single package, optionally specify the jar
#	to depend on
#
# Example: Record the dependency on whole xerces-2 and xalan,
# java-pkg_register-dependency xerces-2,xalan
# Example: Record the dependency on ant.jar from ant-core
# java-pkg_register-dependency ant-core ant.jar
#
# Note: Passing both list of packages as the first parameter AND specifying the
# jar as the second is not allowed and will cause the function to die. We assume
# that there's more chance one passes such combination as a mistake, than that
# there are more packages providing identically named jar without class
# collisions.
# ------------------------------------------------------------------------------
java-pkg_register-dependency() {
	debug-print-function ${FUNCNAME} $*

	# Writes into package.env, so this is strictly install-time.
	java-pkg_check-phase install

	[[ ${#} -gt 2 ]] && die "${FUNCNAME} takes at most two arguments"

	local pkgs="${1}"
	local jar="${2}"

	[[ -z "${pkgs}" ]] && die "${FUNCNAME} called with no package(s) specified"

	# EAPI 1 has no ':' in atoms; SLOTted packages are encoded with '-'.
	[[ "${EAPI}" == "1" ]] && pkgs="${pkgs//:/-}"

	if [[ -n "${jar}" ]]; then
		# Single package plus one specific jar from it.
		[[ ${pkgs} == *,* ]] && \
			die "${FUNCNAME} called with both package list and jar name"
		java-pkg_ensure-dep runtime "${pkgs}"
		java-pkg_record-jar_ "${pkgs}" "${jar}"
	else
		# One or more whole packages.
		for pkg in ${pkgs//,/ }; do
			java-pkg_ensure-dep runtime "${pkg}"
			java-pkg_record-jar_ "${pkg}"
		done
	fi

	java-pkg_do_write_
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_register-optional-dependency
#
# Registers optional runtime dependency on a package, list of packages, or a
# single jar from a package, into package.env OPTIONAL_DEPEND line. Can only be
# called in src_install phase.
# Intended for packages that can use other packages when those are in classpath.
# Will be put on classpath by launcher if they are installed. Typical case is
# JDBC implementations for various databases. It's better than having USE flag
# for each implementation triggering hard dependency.
#
# @param $1 - comma-separated list of packages, or a single package
# @param $2 - if param $1 is a single package, optionally specify the jar
#	to depend on
#
# Example: Record the optional dependency on some jdbc providers
# java-pkg_register-optional-dependency jdbc-jaybird,jtds-1.2,jdbc-mysql
#
# Note: Passing both list of packages as the first parameter AND specifying the
# jar as the second is not allowed and will cause the function to die. We assume
# that there's more chance one passes such combination as a mistake, than that
# there are more packages providing identically named jar without class
# collisions.
# ------------------------------------------------------------------------------
java-pkg_register-optional-dependency() {
	debug-print-function ${FUNCNAME} $*

	# Writes into package.env, so this is strictly install-time.
	java-pkg_check-phase install

	[[ ${#} -gt 2 ]] && die "${FUNCNAME} takes at most two arguments"

	local pkgs="${1}"
	local jar="${2}"

	[[ -z "${pkgs}" ]] && die "${FUNCNAME} called with no package(s) specified"

	# EAPI 1 has no ':' in atoms; SLOTted packages are encoded with '-'.
	[[ "${EAPI}" == "1" ]] && pkgs="${pkgs//:/-}"

	if [[ -n "${jar}" ]]; then
		# Single package plus one specific jar from it.
		[[ ${pkgs} == *,* ]] && \
			die "${FUNCNAME} called with both package list and jar name"
		java-pkg_record-jar_ --optional "${pkgs}" "${jar}"
	else
		# One or more whole packages; no ensure-dep here, the deps are optional.
		for pkg in ${pkgs//,/ }; do
			java-pkg_record-jar_ --optional "${pkg}"
		done
	fi

	java-pkg_do_write_
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_register-environment-variable
#
# Register an arbitrary environment variable into package.env. The gjl launcher
# for this package or any package depending on this will export it into
# environment before executing java command.
# Must only be called in src_install phase.
#
# @param $1 - variable name
# @param $2 - variable value
# ------------------------------------------------------------------------------
JAVA_PKG_EXTRA_ENV="${T}/java-pkg-extra-env"
JAVA_PKG_EXTRA_ENV_VARS=""
java-pkg_register-environment-variable() {
	debug-print-function ${FUNCNAME} $*

	# Writes into package.env, so this is strictly install-time.
	java-pkg_check-phase install

	[[ ${#} != 2 ]] && die "${FUNCNAME} takes two arguments"

	# Append the NAME="VALUE" pair and remember the name so it ends up
	# in package.env on the next write.
	echo "${1}=\"${2}\"" >> ${JAVA_PKG_EXTRA_ENV}
	JAVA_PKG_EXTRA_ENV_VARS="${JAVA_PKG_EXTRA_ENV_VARS} ${1}"

	java-pkg_do_write_
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_get-bootclasspath
#
# Returns classpath of a given bootclasspath-providing package version.
#
# @param $1 - the version of bootclasspath (e.g. 1.5), 'auto' for bootclasspath
#	of the current JDK
# ------------------------------------------------------------------------------

java-pkg_get-bootclasspath() {
	local version="${1}"
	local bcp

	case "${version}" in
		auto)
			# Ask the active JDK for its own boot classpath.
			bcp="$(java-config -g BOOTCLASSPATH)"
			;;
		1.5)
			# GNU Classpath 0.98 provides the 1.5 API.
			bcp="$(java-pkg_getjars --build-only gnu-classpath-0.98)"
			;;
		*)
			eerror "unknown parameter of java-pkg_get-bootclasspath"
			die "unknown parameter of java-pkg_get-bootclasspath"
			;;
	esac

	echo "${bcp}"
}


# This function reads stdin, and based on that input, figures out how to
# populate jars from the filesystem.
# Need to figure out a good way of making use of this, ie be able to use a
# string that was built instead of stdin
# NOTE: this isn't quite ready for primetime.
+#java-pkg_populate-jars() { +# local line +# +# read line +# while [[ -n "${line}" ]]; do +# # Ignore comments +# [[ ${line%%#*} == "" ]] && continue +# +# # get rid of any spaces +# line="${line// /}" +# +# # format: path=jarinfo +# local path=${line%%=*} +# local jarinfo=${line##*=} +# +# # format: jar@package +# local jar=${jarinfo%%@*}.jar +# local package=${jarinfo##*@} +# if [[ -n ${replace_only} ]]; then +# [[ ! -f $path ]] && die "No jar exists at ${path}" +# fi +# if [[ -n ${create_parent} ]]; then +# local parent=$(dirname ${path}) +# mkdir -p "${parent}" +# fi +# java-pkg_jar-from "${package}" "${jar}" "${path}" +# +# read line +# done +#} + +# ------------------------------------------------------------------------------ +# @section-end query +# ------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ +# @section-begin helper +# @section-summary Helper functions +# +# Various other functions to use from an ebuild +# ------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ +# @ebuild-function java-pkg_need +# +# Adds virtual dependencies, which can optionally be controlled by a USE flag. +# Currently supported virtuals are: +# javamail +# jdbc-stdext +# jaf +# jdbc-rowset +# jms +# +# @param $1 - Optionally indicate that the dependencies are controlled by +# a use flag by specifying '--use' Requires $2. +# @param $2 - USE flag which will enable the dependencies. 
+# @param $@ - virtual packages to add depenedencies for +# ------------------------------------------------------------------------------ +# TODO rewrite to parse a line based declaration file instead -- karltk +#java-pkg_need() { +# debug-print-function ${FUNCNAME} $* +# local useflag +# if [[ ${1} == "--use" ]]; then +# useflag="${2}" +# shift 2 +# fi +# +# if [[ -z ${1} ]]; then +# die "Must specify at least one virtual package." +# fi +# +# local depstr newdepstr +# +# for virtual in ${@}; do +# if has ${virtual} ${JAVA_PKG_VNEED}; then +# debug-print "Already registered virtual ${virtual}" +# continue +# fi +# case ${virtual} in +# javamail) +# debug-print "java-pkg_need: adding javamail dependencies" +# newdepstr="|| ( dev-java/gnu-javamail dev-java/sun-javamail-bin )" +# ;; +# jdbc-stdext) +# debug-print "java-pkg_need: adding jdbc-stdext dependencies" +# newdepstr="|| ( >=virtual/jdk-1.4 dev-java/jdbc2-stdext )" +# ;; +# jaf) +# debug-print "java-pkg_need: adding jaf dependencies" +# newdepstr="|| ( dev-java/gnu-jaf dev-java/sun-jaf-bin )" +# ;; +# jdbc-rowset) +# debug-print "java-pkg_need: adding jdbc-rowset dependencies" +# newdepstr="|| ( >=virtual/jdk-1.5 dev-java/sun-jdbc-rowset )" +# ;; +# jms) +# debug-print "java-pkg_need: adding jms dependencies" +# newdepstr="|| ( dev-java/sun-jms dev-java/openjms )" +# ;; +# *) +# die "Invalid virtual: ${virtual}" +# esac +# +# export JAVA_PKG_VNEED="${JAVA_PKG_VNEED} ${virtual}" +# +# if [[ -n ${useflag} ]]; then +# depstr="${depstr} ${useflag}? 
( ${newdepstr} )" +# else +# depstr="${depstr} ${newdepstr}" +# fi +# done +# +# [[ -z ${JAVA_PKG_NV_DEPEND} ]] && export JAVA_PKG_NV_DEPEND="${DEPEND}" +# [[ -z ${JAVA_PKG_NV_RDEPEND} ]] && export JAVA_PKG_NV_RDEPEND="${RDEPEND}" +# +# export DEPEND="${DEPEND} ${depstr}" +# export RDEPEND="${RDEPEND} ${depstr}" +#} + +# ------------------------------------------------------------------------------ +# @ebuild-function java-pkg_find-normal-jars +# +# Find the files with suffix .jar file in the given directory or $WORKDIR +# +# @param $1 - The directory to search for jar files (default: ${WORKDIR}) +# ------------------------------------------------------------------------------ +java-pkg_find-normal-jars() { + local dir=$1 + [[ "${dir}" ]] || dir="${WORKDIR}" + local found + for jar in $(find "${dir}" -name "*.jar" -type f); do + echo "${jar}" + found="true" + done + [[ "${found}" ]] + return $? +} + +# ------------------------------------------------------------------------------ +# @ebuild-function java-pkg_ensure-no-bundled-jars +# +# Try to locate bundled jar files in ${WORKDIR} and die if found. +# This function should be called after WORKDIR has been populated with symlink +# to system jar files or bundled jars removed. +# ------------------------------------------------------------------------------ +java-pkg_ensure-no-bundled-jars() { + debug-print-function ${FUNCNAME} $* + + local bundled_jars=$(java-pkg_find-normal-jars) + if [[ -n ${bundled_jars} ]]; then + echo "Bundled jars found:" + local jar + for jar in ${bundled_jars}; do + echo $(pwd)${jar/./} + done + die "Bundled jars found!" + fi +} + +# ------------------------------------------------------------------------------ +# @internal-function java-pkg_ensure-vm-version-sufficient +# +# Checks if we have a sufficient VM and dies if we don't. 
#
# ------------------------------------------------------------------------------
java-pkg_ensure-vm-version-sufficient() {
	debug-print-function ${FUNCNAME} $*

	if ! java-pkg_is-vm-version-sufficient; then
		debug-print "VM is not suffient"
		eerror "Current Java VM cannot build this package"
		einfo "Please use java-config -S to set the correct one"
		die "Active Java VM cannot build this package"
	fi
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_is-vm-version-sufficient
#
# @return zero - VM is sufficient
# @return non-zero - VM is not sufficient
# ------------------------------------------------------------------------------
java-pkg_is-vm-version-sufficient() {
	debug-print-function ${FUNCNAME} $*

	# Let depend-java-query decide against the ebuild's DEPEND string.
	depend-java-query --is-sufficient "${DEPEND}" > /dev/null
	return $?
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_ensure-vm-version-eq
#
# Die if the current VM is not equal to the argument passed.
#
# @param $@ - Desired VM version to ensure
# ------------------------------------------------------------------------------
java-pkg_ensure-vm-version-eq() {
	debug-print-function ${FUNCNAME} $*

	# Consistency fix: quote "$@" exactly like java-pkg_ensure-vm-version-ge
	# does, so multi-word version specs are forwarded intact.
	if ! java-pkg_is-vm-version-eq "$@" ; then
		debug-print "VM is not suffient"
		eerror "This package requires a Java VM version = $@"
		einfo "Please use java-config -S to set the correct one"
		die "Active Java VM too old"
	fi
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_is-vm-version-eq
#
# @param $@ - VM version to compare current VM to
# @return zero - VM versions are equal
# @return non-zero - VM version are not equal
# ------------------------------------------------------------------------------
java-pkg_is-vm-version-eq() {
	debug-print-function ${FUNCNAME} $*

	local needed_version="$@"

	[[ -z "${needed_version}" ]] && die "need an argument"

	local vm_version="$(java-pkg_get-vm-version)"

	# Only major.minor take part in the comparison.
	vm_version="$(get_version_component_range 1-2 "${vm_version}")"
	needed_version="$(get_version_component_range 1-2 "${needed_version}")"

	if [[ -z "${vm_version}" ]]; then
		debug-print "Could not get JDK version from DEPEND"
		return 1
	else
		if [[ "${vm_version}" == "${needed_version}" ]]; then
			debug-print "Detected a JDK(${vm_version}) = ${needed_version}"
			return 0
		else
			debug-print "Detected a JDK(${vm_version}) != ${needed_version}"
			return 1
		fi
	fi
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_ensure-vm-version-ge
#
# Die if the current VM is not greater than the desired version
#
# @param $@ - VM version to compare current to
# ------------------------------------------------------------------------------
java-pkg_ensure-vm-version-ge() {
	debug-print-function ${FUNCNAME} $*

	if ! java-pkg_is-vm-version-ge "$@" ; then
		debug-print "vm is not suffient"
		eerror "This package requires a Java VM version >= $@"
		einfo "Please use java-config -S to set the correct one"
		die "Active Java VM too old"
	fi
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_is-vm-version-ge
#
# @param $@ - VM version to compare current VM to
# @return zero - current VM version is greater than checked version
# @return non-zero - current VM version is not greater than checked version
# ------------------------------------------------------------------------------
java-pkg_is-vm-version-ge() {
	debug-print-function ${FUNCNAME} $*

	local needed_version=$@
	local vm_version=$(java-pkg_get-vm-version)
	if [[ -z "${vm_version}" ]]; then
		debug-print "Could not get JDK version from DEPEND"
		return 1
	else
		if version_is_at_least "${needed_version}" "${vm_version}"; then
			debug-print "Detected a JDK(${vm_version}) >= ${needed_version}"
			return 0
		else
			debug-print "Detected a JDK(${vm_version}) < ${needed_version}"
			return 1
		fi
	fi
}

# Remember the VM handle selected for this build.
java-pkg_set-current-vm() {
	export GENTOO_VM=${1}
}

# Echo the currently selected VM handle.
java-pkg_get-current-vm() {
	echo ${GENTOO_VM}
}

# Return 0 if the current VM is among the given handles.
java-pkg_current-vm-matches() {
	hasq $(java-pkg_get-current-vm) ${@}
	return $?
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_get-source
#
# Determines what source version should be used, for passing to -source.
# Unless you want to break things you probably shouldn't set _WANT_SOURCE
#
# @return string - Either the lowest possible source, or JAVA_PKG_WANT_SOURCE
# ------------------------------------------------------------------------------
java-pkg_get-source() {
	echo ${JAVA_PKG_WANT_SOURCE:-$(depend-java-query --get-lowest "${DEPEND} ${RDEPEND}")}
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_get-target
#
# Determines what target version should be used, for passing to -target.
# If you don't care about lower versions, you can set _WANT_TARGET to the
# version of your JDK.
#
# @return string - Either the lowest possible target, or JAVA_PKG_WANT_TARGET
# ------------------------------------------------------------------------------
java-pkg_get-target() {
	echo ${JAVA_PKG_WANT_TARGET:-$(depend-java-query --get-lowest "${DEPEND} ${RDEPEND}")}
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_get-javac
#
# Prints the javac executable for the currently selected compiler
# (GENTOO_COMPILER). For anything other than plain "javac" the value is read
# from the JAVAC variable of the compiler's env file under
# /usr/share/java-config-2/compiler/.
#
# @return 0 with the executable on stdout, or 1 with an error message on stdout
# ------------------------------------------------------------------------------
java-pkg_get-javac() {
	debug-print-function ${FUNCNAME} $*


	local compiler="${GENTOO_COMPILER}"

	local compiler_executable
	if [[ "${compiler}" = "javac" ]]; then
		# nothing fancy needs to be done for javac
		compiler_executable="javac"
	else
		# for everything else, try to determine from an env file

		local compiler_env="/usr/share/java-config-2/compiler/${compiler}"
		if [[ -f ${compiler_env} ]]; then
			# Source the env file in a subshell so nothing but JAVAC leaks
			# out; save and restore any JAVAC already in the environment.
			local old_javac=${JAVAC}
			unset JAVAC
			# try to get value of JAVAC
			compiler_executable="$(source ${compiler_env} 1>/dev/null 2>&1; echo ${JAVAC})"
			export JAVAC=${old_javac}

			if [[ -z ${compiler_executable} ]]; then
				echo "JAVAC is empty or undefined in ${compiler_env}"
				return 1
			fi

			# check that it's executable
			if [[ ! -x ${compiler_executable} ]]; then
				echo "${compiler_executable} doesn't exist, or isn't executable"
				return 1
			fi
		else
			echo "Could not find environment file for ${compiler}"
			return 1
		fi
	fi
	echo ${compiler_executable}
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_javac-args
#
# If an ebuild uses javac directly, instead of using ejavac, it should call this
# to know what -source/-target to use.
#
# @return string - arguments to pass to javac, complete with -target and -source
# ------------------------------------------------------------------------------
java-pkg_javac-args() {
	debug-print-function ${FUNCNAME} $*

	local want_source="$(java-pkg_get-source)"
	local want_target="$(java-pkg_get-target)"

	local source_str="-source ${want_source}"
	local target_str="-target ${want_target}"

	debug-print "want source: ${want_source}"
	debug-print "want target: ${want_target}"

	if [[ -z "${want_source}" || -z "${want_target}" ]]; then
		debug-print "could not find valid -source/-target values for javac"
		echo "Could not find valid -source/-target values for javac"
		return 1
	else
		# -source is only understood by 1.4+ VMs, so omit it on older ones
		if java-pkg_is-vm-version-ge "1.4"; then
			echo "${source_str} ${target_str}"
		else
			echo "${target_str}"
		fi
	fi
}

# TODO document
# Prints the C compiler include flags needed to build JNI code against the
# current JAVA_HOME.
java-pkg_get-jni-cflags() {
	local flags="-I${JAVA_HOME}/include"

	local platform="linux"
	use elibc_FreeBSD && platform="freebsd"

	# TODO do a check that the directories are valid
	flags="${flags} -I${JAVA_HOME}/include/${platform}"

	echo ${flags}
}

# Dies unless sys-devel/gcc was built with USE=gcj.
java-pkg_ensure-gcj() {
	if ! built_with_use sys-devel/gcc gcj ; then
		ewarn
		ewarn "You must build gcc with the gcj support to build with gcj"
		ewarn
		ebeep 5
		die "No GCJ support found!"
	fi
}

# Dies when FEATURES=test is active but USE=test is disabled, since the
# additional test dependencies would then be missing.
java-pkg_ensure-test() {
	if hasq test ${FEATURES} && ! hasq -test ${FEATURES} \
		&& hasq test ${IUSE} && ! use test;
	then
		eerror "You specified FEATURES=test, but USE=test is needed"
		eerror "to pull in the additional dependencies for testing"
		die "Need USE=test enabled"
	fi
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_register-ant-task
#
# Register this package as ant task, so that ant will load it when no specific
# ANT_TASKS are specified. Note that even without this registering, all packages
# specified in ANT_TASKS will be loaded. Mostly used by the actual ant tasks
# packages, but can be also used by other ebuilds that used to symlink their
# .jar into /usr/share/ant-core/lib to get autoloaded, for backwards
# compatibility.
#
# @param --version x.y Register only for ant version x.y (otherwise for any ant
#        version). Used by the ant-* packages to prevent loading of mismatched
#        ant-core ant tasks after core was updated, before the tasks are
#        updated, without a need for blockers.
# @param $1 Name to register as. Defaults to JAVA_PKG_NAME ($PN[-$SLOT])
# ------------------------------------------------------------------------------
java-pkg_register-ant-task() {
	local TASKS_DIR="tasks"

	# check for --version x.y parameters
	while [[ -n "${1}" && -n "${2}" ]]; do
		local var="${1#--}"
		local val="${2}"
		if [[ "${var}" == "version" ]]; then
			TASKS_DIR="tasks-${val}"
		else
			die "Unknown parameter passed to java-pkg_register-ant-tasks: ${1} ${2}"
		fi
		shift 2
	done

	local TASK_NAME="${1:-${JAVA_PKG_NAME}}"

	dodir /usr/share/ant/${TASKS_DIR}
	touch "${D}/usr/share/ant/${TASKS_DIR}/${TASK_NAME}"
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_ant-tasks-depend
#
# Translates the WANT_ANT_TASKS variable into valid dependencies.
# ------------------------------------------------------------------------------
java-pkg_ant-tasks-depend() {
	debug-print-function ${FUNCNAME} ${WANT_ANT_TASKS}

	if [[ -n "${WANT_ANT_TASKS}" ]]; then
		local DEP=""
		for i in ${WANT_ANT_TASKS}
		do
			# Bare "ant-*" names resolve to dev-java/; full cat/pkg:slot
			# atoms are passed through unchanged.
			if [[ ${i} = ant-* ]]; then
				DEP="${DEP}dev-java/${i} "
			elif [[ ${i} = */*:* ]]; then
				DEP="${DEP}${i} "
			else
				echo "Invalid atom in WANT_ANT_TASKS: ${i}"
				return 1
			fi
		done
		echo ${DEP}
		return 0
	else
		return 0
	fi
}

# ------------------------------------------------------------------------------
# @ebuild-function ejunit
#
# Junit wrapper function. Makes it easier to run the tests and checks for
# dev-java/junit in DEPEND. Launches the tests using junit.textui.TestRunner.
#
# Examples:
# ejunit -cp build/classes org.blinkenlights.jid3.test.AllTests
# ejunit org.blinkenlights.jid3.test.AllTests
#
# @param $1 - -cp or -classpath
# @param $2 - classpath; junit and recorded dependencies get appended
# @param $@ - the rest of the parameters are passed to java
# ------------------------------------------------------------------------------
ejunit() {
	debug-print-function ${FUNCNAME} $*

	local pkgs
	if [[ -f ${JAVA_PKG_DEPEND_FILE} ]]; then
		# Build a comma-separated list of the packages recorded as
		# "jar@pkg" entries in the depend file.
		for atom in $(cat ${JAVA_PKG_DEPEND_FILE} | tr : ' '); do
			pkgs=${pkgs},$(echo ${atom} | sed -re "s/^.*@//")
		done
	fi

	local cp=$(java-pkg_getjars --with-dependencies junit${pkgs})
	if [[ ${1} = -cp || ${1} = -classpath ]]; then
		cp="${2}:${cp}"
		shift 2
	else
		cp=".:${cp}"
	fi

	local runner=junit.textui.TestRunner
	debug-print "Calling: java -cp \"${cp}\" -Djava.awt.headless=true ${runner} ${@}"
	java -cp "${cp}" -Djava.awt.headless=true ${runner} "${@}" || die "Running junit failed"
}

# ------------------------------------------------------------------------------
# @section-end helper
# ------------------------------------------------------------------------------

#
# ------------------------------------------------------------------------------
# @eclass-src_prepare
#
# src_prepare Searches for bundled jars
# Don't call directly, but via java-pkg-2_src_prepare!
# ------------------------------------------------------------------------------

java-utils-2_src_prepare() {
	# Run the ebuild's java_prepare hook (if defined) only in the prepare phase.
	[[ ${EBUILD_PHASE} == prepare ]] &&
		java-pkg_func-exists java_prepare && java_prepare

	# Remember that eant will call this unless called via Portage
	if [[ ! -e "${T}/java-utils-2_src_prepare-run" ]] && is-java-strict; then
		echo "Searching for bundled jars:"
		java-pkg_find-normal-jars || echo "None found."
		echo "Searching for bundled classes (no output if none found):"
		find "${WORKDIR}" -name "*.class"
		echo "Search done."
	fi
	touch "${T}/java-utils-2_src_prepare-run"
}

# ------------------------------------------------------------------------------
# @eclass-pkg_preinst
#
# pkg_preinst Searches for missing and unneeded dependencies
# Don't call directly, but via java-pkg-2_pkg_preinst!
# ------------------------------------------------------------------------------

java-utils-2_pkg_preinst() {
	if is-java-strict; then
		if has_version dev-java/java-dep-check; then
			[[ -e "${JAVA_PKG_ENV}" ]] || return
			local output=$(GENTOO_VM= java-dep-check --image "${D}" "${JAVA_PKG_ENV}")
			# BUGFIX: has_version must be run as a command. Inside [[ ]] it
			# was parsed as a plain (always-true) word, so both the old- and
			# new-version branches fired whenever any output was produced.
			# Old java-dep-check prints a bare package list, newer versions
			# print a preformatted report.
			if [[ -n ${output} ]] && has_version "<=dev-java/java-dep-check-0.2"; then
				ewarn "Possibly unneeded dependencies found in package.env:"
				for dep in ${output}; do
					ewarn "\t${dep}"
				done
			fi
			if [[ -n ${output} ]] && has_version ">dev-java/java-dep-check-0.2"; then
				ewarn "${output}"
			fi
		else
			eerror "Install dev-java/java-dep-check for dependency checking"
		fi
	fi
}

# ------------------------------------------------------------------------------
# @section-begin build
# @section-summary Build functions
#
# These are some functions for building a package. In particular, it consists of
# wrappers for javac and ant.
# ------------------------------------------------------------------------------
# @ebuild-function eant
#
# Ant wrapper function. Will use the appropriate compiler, based on user-defined
# compiler. Will also set proper ANT_TASKS from the variable ANT_TASKS,
# variables:
# EANT_GENTOO_CLASSPATH - calls java-pkg_getjars for the value and adds to the
#                         gentoo.classpath property. Be sure to call
#                         java-ant_rewrite-classpath in src_unpack.
# EANT_NEEDS_TOOLS - add tools.jar to the gentoo.classpath. Should only be used
#                    for build-time purposes, the dependency is not recorded to
#                    package.env!
# JAVA_PKG_NO_BUNDLED_SEARCH - Don't search for bundled jars or class files
# *ANT_TASKS - used to determine ANT_TASKS before calling Ant.
# ------------------------------------------------------------------------------
eant() {
	debug-print-function ${FUNCNAME} $*

	# When invoked outside of Portage's compile phase, make sure the
	# configure/prepare steps have still been run.
	if [[ ${EBUILD_PHASE} = compile ]]; then
		java-ant-2_src_configure
		java-utils-2_src_prepare
	fi

	if ! hasq java-ant-2 ${INHERITED}; then
		local msg="You should inherit java-ant-2 when using eant"
		java-pkg_announce-qa-violation "${msg}"
	fi

	# Keep ant offline and under our classpath control.
	local antflags="-Dnoget=true -Dmaven.mode.offline=true -Dbuild.sysclasspath=ignore"

	java-pkg_init-compiler_
	local compiler="${GENTOO_COMPILER}"

	local compiler_env="${JAVA_PKG_COMPILER_DIR}/${compiler}"
	local build_compiler="$(source ${compiler_env} 1>/dev/null 2>&1; echo ${ANT_BUILD_COMPILER})"
	if [[ "${compiler}" != "javac" && -z "${build_compiler}" ]]; then
		die "ANT_BUILD_COMPILER undefined in ${compiler_env}"
	fi

	if [[ ${compiler} != "javac" ]]; then
		antflags="${antflags} -Dbuild.compiler=${build_compiler}"
		# Figure out any extra stuff to put on the classpath for compilers aside
		# from javac
		# ANT_BUILD_COMPILER_DEPS should be something that could be passed to
		# java-config -p
		local build_compiler_deps="$(source ${JAVA_PKG_COMPILER_DIR}/${compiler} 1>/dev/null 2>&1; echo ${ANT_BUILD_COMPILER_DEPS})"
		if [[ -n ${build_compiler_deps} ]]; then
			antflags="${antflags} -lib $(java-config -p ${build_compiler_deps})"
		fi
	fi

	# -lib bypasses Gentoo's dependency tracking: forbid it under strict mode.
	for arg in "${@}"; do
		if [[ ${arg} = -lib ]]; then
			if is-java-strict; then
				eerror "You should not use the -lib argument to eant because it will fail"
				eerror "with JAVA_PKG_STRICT. Please use for example java-pkg_jar-from"
				eerror "or ant properties to make dependencies available."
				eerror "For ant tasks use WANT_ANT_TASKS or ANT_TASKS from."
				eerror "split ant (>=dev-java/ant-core-1.7)."
				die "eant -lib is deprecated/forbidden"
			else
				echo "eant -lib is deprecated. Turn JAVA_PKG_STRICT on for"
				echo "more info."
			fi
		fi
	done

	# parse WANT_ANT_TASKS for atoms
	local want_ant_tasks
	for i in ${WANT_ANT_TASKS}; do
		if [[ ${i} = */*:* ]]; then
			# strip the category and a trailing :0 slot; other slots
			# become part of the task name (pkg:1 -> pkg-1)
			i=${i#*/}
			i=${i%:0}
			want_ant_tasks+="${i/:/-} "
		else
			want_ant_tasks+="${i} "
		fi
	done
	# default ANT_TASKS to WANT_ANT_TASKS, if ANT_TASKS is not set explicitly
	ANT_TASKS="${ANT_TASKS:-${want_ant_tasks% }}"

	# override ANT_TASKS with JAVA_PKG_FORCE_ANT_TASKS if it's set
	ANT_TASKS="${JAVA_PKG_FORCE_ANT_TASKS:-${ANT_TASKS}}"

	# if ant-tasks is not set by ebuild or forced, use none
	ANT_TASKS="${ANT_TASKS:-none}"

	# at this point, ANT_TASKS should be "all", "none" or explicit list
	if [[ "${ANT_TASKS}" == "all" ]]; then
		einfo "Using all available ANT_TASKS"
	elif [[ "${ANT_TASKS}" == "none" ]]; then
		einfo "Disabling all optional ANT_TASKS"
	else
		einfo "Using following ANT_TASKS: ${ANT_TASKS}"
	fi

	export ANT_TASKS

	[[ -n ${JAVA_PKG_DEBUG} ]] && antflags="${antflags} --execdebug -debug"
	[[ -n ${PORTAGE_QUIET} ]] && antflags="${antflags} -q"

	local gcp="${EANT_GENTOO_CLASSPATH}"
	local getjarsarg=""

	if [[ ${EBUILD_PHASE} = "test" ]]; then
		antflags="${antflags} -DJunit.present=true"
		[[ ${ANT_TASKS} = *ant-junit* ]] && gcp="${gcp} junit"
		getjarsarg="--with-dependencies"
	fi

	local cp

	for atom in ${gcp}; do
		cp="${cp}:$(java-pkg_getjars ${getjarsarg} ${atom})"
	done

	[[ -n "${EANT_NEEDS_TOOLS}" ]] && cp="${cp}:$(java-config --tools)"

	if [[ ${cp} ]]; then
		# It seems ant does not like single quotes around ${cp}
		cp=${cp#:}
		[[ ${EANT_GENTOO_CLASSPATH_EXTRA} ]] && \
			cp="${cp}:${EANT_GENTOO_CLASSPATH_EXTRA}"
		antflags="${antflags} -Dgentoo.classpath=\"${cp}\""
	fi

	[[ -n ${JAVA_PKG_DEBUG} ]] && echo ant ${antflags} "${@}"
	debug-print "Calling ant (GENTOO_VM: ${GENTOO_VM}): ${antflags} ${@}"
	ant ${antflags} "${@}" || die "eant failed"
}

# ------------------------------------------------------------------------------
# @ebuild-function ejavac
#
# Javac wrapper function. Will use the appropriate compiler, based on
# /etc/java-config/compilers.conf
#
# @param $@ - Arguments to be passed to the compiler
# ------------------------------------------------------------------------------
ejavac() {
	debug-print-function ${FUNCNAME} $*

	java-pkg_init-compiler_

	local compiler_executable javac_args

	# On failure both helpers print their error message on stdout, so the
	# captured variable doubles as the diagnostic text.
	if ! compiler_executable=$(java-pkg_get-javac); then
		eerror "There was a problem determining compiler: ${compiler_executable}"
		die "get-javac failed"
	fi

	if ! javac_args="$(java-pkg_javac-args)"; then
		eerror "There was a problem determining JAVACFLAGS: ${javac_args}"
		die "java-pkg_javac-args failed"
	fi

	if [[ -n ${JAVA_PKG_DEBUG} ]]; then
		echo ${compiler_executable} ${javac_args} "${@}"
	fi
	${compiler_executable} ${javac_args} "${@}" || die "ejavac failed"
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_filter-compiler
#
# Used to prevent the use of some compilers. Should be used in src_compile.
# Basically, it just appends onto JAVA_PKG_FILTER_COMPILER
#
# @param $@ - compilers to filter
# ------------------------------------------------------------------------------
java-pkg_filter-compiler() {
	JAVA_PKG_FILTER_COMPILER+=" $@"
}

# ------------------------------------------------------------------------------
# @ebuild-function java-pkg_force-compiler
#
# Used to force the use of particular compilers. Should be used in src_compile.
# A common use of this would be to force ecj-3.1 to be used on amd64, to avoid
# OutOfMemoryErrors that may come up.
#
# @param $@ - compilers to force
# ------------------------------------------------------------------------------
java-pkg_force-compiler() {
	JAVA_PKG_FORCE_COMPILER="${*}"
}

# ------------------------------------------------------------------------------
# @ebuild-function use_doc
#
# Helper function for getting ant to build javadocs. If the user has USE=doc,
# then 'javadoc' or the argument are returned. Otherwise, there is no return.
#
# The output of this should be passed to ant.
#
# Example: build javadocs by calling 'javadoc' target
#	eant $(use_doc)
# Example: build javadocs by calling 'apidoc' target
#	eant $(use_doc apidoc)
#
# @param $@ - Option value to return. Defaults to 'javadoc'
# @return string - Name of the target to create javadocs
# ------------------------------------------------------------------------------
use_doc() {
	use doc || return
	echo ${@:-javadoc}
}


# ------------------------------------------------------------------------------
# @section-end build
# ------------------------------------------------------------------------------

# ------------------------------------------------------------------------------
# @section-begin internal
# @section-summary Internal functions
#
# Do __NOT__ use any of these from an ebuild! These are only to be used from
# within the java eclasses.
# ------------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# @function-internal java-pkg_init
#
# The purpose of this function, as the name might imply, is to initialize the
# Java environment. It ensures that there aren't any environment variables
# that'll muss things up. It initializes some variables, which are used
# internally. And most importantly, it'll switch the VM if necessary.
#
# This shouldn't be used directly.
# Instead, java-pkg and java-pkg-opt will
# call it during each of the phases of the merge process.
#
# -----------------------------------------------------------------------------
java-pkg_init() {
	debug-print-function ${FUNCNAME} $*
	unset JAVAC
	unset JAVA_HOME

	java-config --help >/dev/null || {
		eerror ""
		eerror "Can't run java-config --help"
		eerror "Have you upgraded python recently but haven't"
		eerror "run python-updater yet?"
		die "Can't run java-config --help"
	}

	# People do all kinds of weird things.
	# http://forums.gentoo.org/viewtopic-p-3943166.html
	local silence="${SILENCE_JAVA_OPTIONS_WARNING}"
	local accept="${I_WANT_GLOBAL_JAVA_OPTIONS}"
	if [[ -n ${_JAVA_OPTIONS} && -z ${accept} && -z ${silence} ]]; then
		ewarn "_JAVA_OPTIONS changes what java -version outputs at least for"
		ewarn "sun-jdk vms and and as such break configure scripts that"
		ewarn "use it (for example app-office/openoffice) so we filter it out."
		ewarn "Use SILENCE_JAVA_OPTIONS_WARNING=true in the environment (use"
		ewarn "make.conf for example) to silence this warning or"
		ewarn "I_WANT_GLOBAL_JAVA_OPTIONS to not filter it."
	fi

	if [[ -z ${accept} ]]; then
		# export _JAVA_OPTIONS= doesn't work because it will show up in java
		# -version output
		unset _JAVA_OPTIONS
		# phase hooks make this run many times without this
		I_WANT_GLOBAL_JAVA_OPTIONS="true"
	fi

	if java-pkg_func-exists ant_src_unpack; then
		java-pkg_announce-qa-violation "Using old ant_src_unpack. Should be src_unpack"
	fi

	java-pkg_init_paths_
	java-pkg_switch-vm
	PATH=${JAVA_HOME}/bin:${PATH}

	# TODO we will probably want to set JAVAC and JAVACFLAGS

	# Do some QA checks
	java-pkg_check-jikes

	# Can't use unset here because Portage does not save the unset
	# see https://bugs.gentoo.org/show_bug.cgi?id=189417#c11

	# When users have crazy classpaths some packages can fail to compile.
	# and everything should work with empty CLASSPATH.
	# This also helps prevent unexpected dependencies on random things
	# from the CLASSPATH.
	export CLASSPATH=

	# Unset external ANT_ stuff
	export ANT_TASKS=
	export ANT_OPTS=
	export ANT_RESPECT_JAVA_HOME=
}

# ------------------------------------------------------------------------------
# @function-internal java-pkg-init-compiler_
#
# This function attempts to figure out what compiler should be used. It does
# this by reading the file at JAVA_PKG_COMPILERS_CONF, and checking the
# COMPILERS variable defined there.
# This can be overridden by a list in JAVA_PKG_FORCE_COMPILER
#
# It will go through the list of compilers, and verify that it supports the
# target and source that are needed. If it is not suitable, then the next
# compiler is checked. When JAVA_PKG_FORCE_COMPILER is defined, this checking
# isn't done.
#
# Once the compiler to use has been figured out, it is set to
# GENTOO_COMPILER.
#
# If you hadn't guessed, JAVA_PKG_FORCE_COMPILER is for testing only.
#
# If the user doesn't define anything in JAVA_PKG_COMPILERS_CONF, or no
# suitable compiler was found there, then the default is to use javac provided
# by the current VM.
#
#
# @return name of the compiler to use
# ------------------------------------------------------------------------------
java-pkg_init-compiler_() {
	debug-print-function ${FUNCNAME} $*

	if [[ -n ${GENTOO_COMPILER} ]]; then
		debug-print "GENTOO_COMPILER already set"
		return
	fi

	local compilers
	if [[ -z ${JAVA_PKG_FORCE_COMPILER} ]]; then
		compilers="$(source ${JAVA_PKG_COMPILERS_CONF} 1>/dev/null 2>&1; echo ${COMPILERS})"
	else
		compilers=${JAVA_PKG_FORCE_COMPILER}
	fi

	debug-print "Read \"${compilers}\" from ${JAVA_PKG_COMPILERS_CONF}"

	# Figure out if we should announce what compiler we're using
	local compiler
	for compiler in ${compilers}; do
		debug-print "Checking ${compiler}..."
		# javac should always be alright
		if [[ ${compiler} = "javac" ]]; then
			debug-print "Found javac... breaking"
			export GENTOO_COMPILER="javac"
			break
		fi

		if has ${compiler} ${JAVA_PKG_FILTER_COMPILER}; then
			if [[ -z ${JAVA_PKG_FORCE_COMPILER} ]]; then
				einfo "Filtering ${compiler}"
				continue
			fi
		fi

		# for non-javac, we need to make sure it supports the right target and
		# source
		local compiler_env="${JAVA_PKG_COMPILER_DIR}/${compiler}"
		if [[ -f ${compiler_env} ]]; then
			local desired_target="$(java-pkg_get-target)"
			local desired_source="$(java-pkg_get-source)"


			# Verify that the compiler supports target
			local supported_target=$(source ${compiler_env} 1>/dev/null 2>&1; echo ${SUPPORTED_TARGET})
			if ! has ${desired_target} ${supported_target}; then
				ewarn "${compiler} does not support -target ${desired_target}, skipping"
				continue
			fi

			# -source was introduced in 1.3, so only check 1.3 and on
			# BUGFIX: this used to read 'version_is_at_least
			# "${desired_soure}" "1.3"' -- a misspelled (always empty)
			# variable with the arguments reversed, so the -source check
			# effectively never ran as intended.
			if version_is_at_least "1.3" "${desired_source}"; then
				# Verify that the compiler supports source
				local supported_source=$(source ${compiler_env} 1>/dev/null 2>&1; echo ${SUPPORTED_SOURCE})
				if ! has ${desired_source} ${supported_source}; then
					ewarn "${compiler} does not support -source ${desired_source}, skipping"
					continue
				fi
			fi

			# if you get here, then the compiler should be good to go
			export GENTOO_COMPILER="${compiler}"
			break
		else
			ewarn "Could not find configuration for ${compiler}, skipping"
			ewarn "Perhaps it is not installed?"
			continue
		fi
	done

	# If it hasn't been defined already, default to javac
	if [[ -z ${GENTOO_COMPILER} ]]; then
		if [[ -n ${compilers} ]]; then
			einfo "No suitable compiler found: defaulting to JDK default for compilation"
		else
			# probably don't need to notify users about the default.
			:;#einfo "Defaulting to javac for compilation"
		fi
		if java-config -g GENTOO_COMPILER 2> /dev/null; then
			export GENTOO_COMPILER=$(java-config -g GENTOO_COMPILER)
		else
			export GENTOO_COMPILER=javac
		fi
	else
		einfo "Using ${GENTOO_COMPILER} for compilation"
	fi

}

# ------------------------------------------------------------------------------
# @internal-function init_paths_
#
# Initializes some variables that will be used. These variables are mostly used
# to determine where things will eventually get installed.
# ------------------------------------------------------------------------------
java-pkg_init_paths_() {
	debug-print-function ${FUNCNAME} $*

	# Slotted packages install as ${PN}-${SLOT}, slot 0 as plain ${PN}.
	if [[ "$SLOT" == "0" ]] ; then
		JAVA_PKG_NAME="${PN}"
	else
		JAVA_PKG_NAME="${PN}-${SLOT}"
	fi

	JAVA_PKG_SHAREPATH="${DESTTREE}/share/${JAVA_PKG_NAME}"
	JAVA_PKG_SOURCESPATH="${JAVA_PKG_SHAREPATH}/sources/"
	JAVA_PKG_ENV="${D}${JAVA_PKG_SHAREPATH}/package.env"
	JAVA_PKG_VIRTUALS_PATH="${DESTTREE}/share/java-config-2/virtuals"
	JAVA_PKG_VIRTUAL_PROVIDER="${D}/${JAVA_PKG_VIRTUALS_PATH}/${JAVA_PKG_NAME}"

	[[ -z "${JAVA_PKG_JARDEST}" ]] && JAVA_PKG_JARDEST="${JAVA_PKG_SHAREPATH}/lib"
	[[ -z "${JAVA_PKG_LIBDEST}" ]] && JAVA_PKG_LIBDEST="${DESTTREE}/$(get_libdir)/${JAVA_PKG_NAME}"
	[[ -z "${JAVA_PKG_WARDEST}" ]] && JAVA_PKG_WARDEST="${JAVA_PKG_SHAREPATH}/webapps"


	# TODO maybe only print once?
	debug-print "JAVA_PKG_SHAREPATH: ${JAVA_PKG_SHAREPATH}"
	debug-print "JAVA_PKG_ENV: ${JAVA_PKG_ENV}"
	debug-print "JAVA_PKG_JARDEST: ${JAVA_PKG_JARDEST}"
	debug-print "JAVA_PKG_LIBDEST: ${JAVA_PKG_LIBDEST}"
	debug-print "JAVA_PKG_WARDEST: ${JAVA_PKG_WARDEST}"
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_do_write_
#
# Writes the package.env out to disk.
#
# ------------------------------------------------------------------------------
# TODO change to do-write, to match everything else
java-pkg_do_write_() {
	debug-print-function ${FUNCNAME} $*
	java-pkg_init_paths_
	# Create directory for package.env
	dodir "${JAVA_PKG_SHAREPATH}"
	# Only emit package.env when there is actually something to record.
	if [[ -n "${JAVA_PKG_CLASSPATH}" || -n "${JAVA_PKG_LIBRARY}" || -f \
		"${JAVA_PKG_DEPEND_FILE}" || -f \
		"${JAVA_PKG_OPTIONAL_DEPEND_FILE}" ]]; then
		# Create package.env
		(
			echo "DESCRIPTION=\"${DESCRIPTION}\""
			echo "GENERATION=\"2\""
			echo "SLOT=\"${SLOT}\""
			echo "CATEGORY=\"${CATEGORY}\""
			echo "PVR=\"${PVR}\""

			[[ -n "${JAVA_PKG_CLASSPATH}" ]] && echo "CLASSPATH=\"${JAVA_PKG_CLASSPATH}\""
			[[ -n "${JAVA_PKG_LIBRARY}" ]] && echo "LIBRARY_PATH=\"${JAVA_PKG_LIBRARY}\""
			[[ -n "${JAVA_PROVIDE}" ]] && echo "PROVIDES=\"${JAVA_PROVIDE}\""
			[[ -f "${JAVA_PKG_DEPEND_FILE}" ]] \
				&& echo "DEPEND=\"$(sort -u "${JAVA_PKG_DEPEND_FILE}" | tr '\n' ':')\""
			[[ -f "${JAVA_PKG_OPTIONAL_DEPEND_FILE}" ]] \
				&& echo "OPTIONAL_DEPEND=\"$(sort -u "${JAVA_PKG_OPTIONAL_DEPEND_FILE}" | tr '\n' ':')\""
			# Record the first virtual/jre or virtual/jdk atom found in the
			# package's dependencies as the VM.
			echo "VM=\"$(echo ${RDEPEND} ${DEPEND} | sed -e 's/ /\n/g' | sed -n -e '/virtual\/\(jre\|jdk\)/ { p;q }')\"" # TODO cleanup !
			[[ -f "${JAVA_PKG_BUILD_DEPEND_FILE}" ]] \
				&& echo "BUILD_DEPEND=\"$(sort -u "${JAVA_PKG_BUILD_DEPEND_FILE}" | tr '\n' ':')\""
		) > "${JAVA_PKG_ENV}"

		# register target/source
		local target="$(java-pkg_get-target)"
		local source="$(java-pkg_get-source)"
		[[ -n ${target} ]] && echo "TARGET=\"${target}\"" >> "${JAVA_PKG_ENV}"
		[[ -n ${source} ]] && echo "SOURCE=\"${source}\"" >> "${JAVA_PKG_ENV}"

		# register javadoc info
		[[ -n ${JAVADOC_PATH} ]] && echo "JAVADOC_PATH=\"${JAVADOC_PATH}\"" \
			>> ${JAVA_PKG_ENV}
		# register source archives
		[[ -n ${JAVA_SOURCES} ]] && echo "JAVA_SOURCES=\"${JAVA_SOURCES}\"" \
			>> ${JAVA_PKG_ENV}


		echo "MERGE_VM=\"${GENTOO_VM}\"" >> "${JAVA_PKG_ENV}"
		[[ -n ${GENTOO_COMPILER} ]] && echo "MERGE_COMPILER=\"${GENTOO_COMPILER}\"" >> "${JAVA_PKG_ENV}"

		# extra env variables
		if [[ -n "${JAVA_PKG_EXTRA_ENV_VARS}" ]]; then
			cat "${JAVA_PKG_EXTRA_ENV}" >> "${JAVA_PKG_ENV}" || die
			# nested echo to remove leading/trailing spaces
			echo "ENV_VARS=\"$(echo ${JAVA_PKG_EXTRA_ENV_VARS})\"" \
				>> "${JAVA_PKG_ENV}" || die
		fi

		# Strip unnecessary leading and trailing colons
		# TODO try to cleanup if possible
		sed -e "s/=\":/=\"/" -e "s/:\"$/\"/" -i "${JAVA_PKG_ENV}" || die "Did you forget to call java_init ?"
	else
		debug-print "JAVA_PKG_CLASSPATH, JAVA_PKG_LIBRARY, JAVA_PKG_DEPEND_FILE"
		debug-print "or JAVA_PKG_OPTIONAL_DEPEND_FILE not defined so can't"
		debug-print "write package.env."
	fi
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_record-jar_
#
# Record an (optional) dependency to the package.env
# @param --optional - record dependency as optional
# @param --build-only - record dependency as build_only
# @param $1 - package to record
# @param $2 - (optional) jar of package to record
# ------------------------------------------------------------------------------
JAVA_PKG_DEPEND_FILE="${T}/java-pkg-depend"
JAVA_PKG_OPTIONAL_DEPEND_FILE="${T}/java-pkg-optional-depend"
JAVA_PKG_BUILD_DEPEND_FILE="${T}/java-pkg-build-depend"

java-pkg_record-jar_() {
	debug-print-function ${FUNCNAME} $*

	local depend_file="${JAVA_PKG_DEPEND_FILE}"
	case "${1}" in
		"--optional") depend_file="${JAVA_PKG_OPTIONAL_DEPEND_FILE}"; shift;;
		"--build-only") depend_file="${JAVA_PKG_BUILD_DEPEND_FILE}"; shift;;
	esac

	# Entries are either "pkg" or "jar@pkg".
	local pkg=${1} jar=${2} append
	if [[ -z "${jar}" ]]; then
		append="${pkg}"
	else
		append="$(basename ${jar})@${pkg}"
	fi

	echo "${append}" >> "${depend_file}"
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_append_
#
# Appends a value to a variable
#
# Example: java-pkg_append_ CLASSPATH foo.jar
# @param $1 variable name to modify
# @param $2 value to append
# ------------------------------------------------------------------------------
java-pkg_append_() {
	debug-print-function ${FUNCNAME} $*

	local var="${1}" value="${2}"
	if [[ -z "${!var}" ]] ; then
		export ${var}="${value}"
	else
		# Only append when the value is not already one of the
		# colon-separated entries.
		local oldIFS=${IFS} cur haveit
		IFS=':'
		for cur in ${!var}; do
			if [[ ${cur} == ${value} ]]; then
				haveit="yes"
				break
			fi
		done
		[[ -z ${haveit} ]] && export ${var}="${!var}:${value}"
		IFS=${oldIFS}
	fi
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_expand_dir_
#
# Gets the full path of the file/directory's
# parent.
# @param $1 - file/directory to find parent directory for
# @return - path to $1's parent directory
# ------------------------------------------------------------------------------
java-pkg_expand_dir_() {
	# Resolve in a subshell so the caller's working directory is untouched.
	(
		cd "$(dirname "${1}")" >/dev/null 2>&1
		pwd
	)
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_func-exists
#
# Does the indicated function exist?
#
# @return 0 - function is declared
# @return 1 - function is undeclared
# ------------------------------------------------------------------------------
java-pkg_func-exists() {
	declare -F "${1}" > /dev/null
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_setup-vm
#
# Sets up the environment for a specific VM, adding sandbox predict entries
# for the files known to be touched by particular vendors' VMs.
#
# ------------------------------------------------------------------------------
java-pkg_setup-vm() {
	debug-print-function ${FUNCNAME} $*

	export LANG="C" LC_ALL="C"

	local vendor="$(java-pkg_get-vm-vendor)"
	case "${vendor}" in
		sun)
			java-pkg_is-vm-version-ge "1.5" && addpredict "/dev/random"
			;;
		ibm)
			addpredict "/proc/self/maps"
			addpredict "/proc/cpuinfo"
			;;
		jrockit)
			addpredict "/proc/cpuinfo"
			;;
	esac
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_needs-vm
#
# Does the current package depend on virtual/jdk?
#
# @return 0 - Package depends on virtual/jdk
# @return 1 - Package does not depend on virtual/jdk
# ------------------------------------------------------------------------------
java-pkg_needs-vm() {
	debug-print-function ${FUNCNAME} $*

	# JAVA_PKG_NV_DEPEND overrides DEPEND; the sed filter keeps only text
	# containing a virtual/jdk atom.
	local nv_depend="${JAVA_PKG_NV_DEPEND:-${DEPEND}}"
	[[ -n "$(echo ${nv_depend} | sed -e '\:virtual/jdk:!d')" ]]
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_get-current-vm
#
# @return - The current VM being used
# ------------------------------------------------------------------------------
java-pkg_get-current-vm() {
	java-config -f
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_get-vm-vendor
#
# @return - The vendor of the current VM
# ------------------------------------------------------------------------------
java-pkg_get-vm-vendor() {
	debug-print-function ${FUNCNAME} $*

	# VM handles look like "<vendor>-<rest>"; keep everything before the
	# first dash.
	local vm="$(java-pkg_get-current-vm)"
	echo "${vm%%-*}"
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_get-vm-version
#
# @return - The version of the current VM
# ------------------------------------------------------------------------------
java-pkg_get-vm-version() {
	debug-print-function ${FUNCNAME} $*

	java-config -g PROVIDES_VERSION
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_switch-vm
#
# Switch VM if we're allowed to (controlled by JAVA_PKG_ALLOW_VM_CHANGE), and
# verify that the current VM is sufficient.
# Setup the environment for the VM being used.
# ------------------------------------------------------------------------------
java-pkg_switch-vm() {
	debug-print-function ${FUNCNAME} $*

	if java-pkg_needs-vm; then
		# Use the VM specified by JAVA_PKG_FORCE_VM
		if [[ -n "${JAVA_PKG_FORCE_VM}" ]]; then
			# If you're forcing the VM, I hope you know what you're doing...
			debug-print "JAVA_PKG_FORCE_VM used: ${JAVA_PKG_FORCE_VM}"
			export GENTOO_VM="${JAVA_PKG_FORCE_VM}"
		# if we're allowed to switch the vm...
		elif [[ "${JAVA_PKG_ALLOW_VM_CHANGE}" == "yes" ]]; then
			debug-print "depend-java-query: NV_DEPEND: ${JAVA_PKG_NV_DEPEND:-${DEPEND}}"
			GENTOO_VM="$(depend-java-query --get-vm "${JAVA_PKG_NV_DEPEND:-${DEPEND}}")"
			if [[ -z "${GENTOO_VM}" || "${GENTOO_VM}" == "None" ]]; then
				eerror "Unable to determine VM for building from dependencies:"
				echo "NV_DEPEND: ${JAVA_PKG_NV_DEPEND:-${DEPEND}}"
				die "Failed to determine VM for building."
			else
				export GENTOO_VM
			fi
		# otherwise just make sure the current VM is sufficient
		else
			java-pkg_ensure-vm-version-sufficient
		fi
		debug-print "Using: $(java-config -f)"

		java-pkg_setup-vm

		export JAVA=$(java-config --java)
		export JAVAC=$(java-config --javac)
		JAVACFLAGS="$(java-pkg_javac-args)"
		if [[ ${?} != 0 ]]; then
			eerror "There was a problem determining JAVACFLAGS: ${JAVACFLAGS}"
			die "java-pkg_javac-args failed"
		fi
		[[ -n ${JAVACFLAGS_EXTRA} ]] && JAVACFLAGS="${JAVACFLAGS_EXTRA} ${JAVACFLAGS}"

		export JAVACFLAGS

		export JAVA_HOME="$(java-config -g JAVA_HOME)"
		export JDK_HOME=${JAVA_HOME}

		# Setup GCJ environment for packages that use gcj directly
		java-pkg_native_init_

		#TODO If you know a better solution let us know.
		java-pkg_append_ LD_LIBRARY_PATH "$(java-config -g LDPATH)"

		local tann="${T}/announced-vm"
		# With the hooks we should only get here once from pkg_setup but better safe than sorry
		# if people have for example modified eclasses some where
		if [[ -n "${JAVA_PKG_DEBUG}" ]] || [[ ! -f "${tann}" ]] ; then
			einfo "Using: $(java-config -f)"
			[[ ! -f "${tann}" ]] && touch "${tann}"
		fi

	else
		[[ -n "${JAVA_PKG_DEBUG}" ]] && ewarn "!!! This package inherits java-pkg but doesn't depend on a JDK. -bin or broken dependency!!!"
	fi
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_gcjflags
#
# Sanitize CFLAGS for GCJ native builds.
# ------------------------------------------------------------------------------
java-pkg_gcjflags() {
	# Keep the original flags around; NOTE(review): OPTIMIZE_CFLAGS is
	# intentionally not local -- presumably read elsewhere, confirm before
	# changing.
	OPTIMIZE_CFLAGS=${CFLAGS}
	strip-flags
	filter-flags "-ftree-loop-distribution -ftree-vectorize"
	filter-flags "-D*"
	replace-flags "-O?" "-O1"
	append-flags -w

	filter-ldflags -Wl,--as-needed --as-needed
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_native-tc-abi
#
# Return libgcj ABI version for given GCJ profile
# @param $1 - gcc-config profile (optional)
# @return 0 -
# ------------------------------------------------------------------------------
java-pkg_native-tc-abi() {
	# The soversion of libgcj shipped by the profile's gcj is the ABI.
	local gccbin="$(gcc-config -B ${1})"
	echo "$(ls -l $(${gccbin}/gcj -print-file-name=libgcj.so) \
		| sed -r 's:.*\.so\.([0-9]*).*$:\1:')"
	return "${?}"
}

# ------------------------------------------------------------------------------
# @internal-function java-pkg_native-jdk-abi
#
# Return libgcj ABI version for given java-config VM or current system VM if
# not specified.
+# @param $1 - java-config VM (optional) +# @return - ABI version +# ------------------------------------------------------------------------------ +java-pkg_native-jdk-abi() { + local java_vm + [[ -z ${1} ]] && java_vm=$(java-config -f) + echo "$(java-config --select-vm ${1} -O | sed -r 's:.*-([0-9]*)$:\1:')" + return "${?}" +} + +# ------------------------------------------------------------------------------ +# @internal-function java-pkg_native-match-jdk-with-tc +# +# Find a matching gcj-jdk for a given libgcc ABI +# +# @param $1 - libgcj ABI version +# @return - gcj-jdk version +# ------------------------------------------------------------------------------ +java-pkg_native-match-jdk-with-tc() { + local libgcj_soversion=${1} + + for gcj_jdk_slot in $(java-config -L \ + | grep gcj-jdk | sed -r \ + 's/.*gcj-jdk-(.*)]/\1/g'); do + if [[ "$(java-pkg_native-tc-abi ${libgcj_soversion})" == \ + "$(java-pkg_native-jdk-abi \ + gcj-jdk-${gcj_jdk_slot})" ]]; then + gcj_java_home=$(java-config \ + --select-vm gcj-jdk-${gcj_jdk_slot} -O) + echo "gcj-jdk-${gcj_jdk_slot}" + return "0" + fi + done + + return "1" +} + +# ------------------------------------------------------------------------------ +# @internal-function java-pkg_native-check-gcj +# +# For a given GCJ profile, check for existance of gcj and gcj-dbtool +# binaries, then compare against installed gcj-jdk(s) if "any" flag is unset +# +# @param $1 - gcc-config profile +# @param $2 - "any" flag (optional) +# @return - 0 on success, otherwise 1 +# ------------------------------------------------------------------------------ +java-pkg_native-check-gcj() +{ + local gccbin=$(gcc-config -B ${1}) + local any=${2} + if ( [[ -x ${gccbin}/gcj ]] && + [[ -x ${gccbin}/gcj-dbtool ]] ); then + if [[ ${any} == "1" ]]; then + return 0; + else + if (java-pkg_native-match-jdk-with-tc \ + ${gcc_profile} >/dev/null); then + return 0; + fi + fi + fi + return 1; +} + +# 
------------------------------------------------------------------------------ +# @internal-function java-pkg_find-supported-gcj +# +# Find a suitable GCJ profile (this is a recursive function) +# +# @param $1 - any flag (used for recursion) +# ------------------------------------------------------------------------------ +java-pkg_native-find-supported-gcj() { + local gccbin gcc_profile + local any=${1} try_current=1 + if ! [[ $(echo "$(java-config -L)" | grep 'gcj-jdk') ]]; then + any=1 + fi + + # First check preferred gcc-config profile + if ! [[ -z ${JAVA_PKG_NATIVE_GCJ_PROFILE} ]]; then + if [[ "${JAVA_PKG_NATIVE_GCJ_PROFILE}" == "highest" ]]; then + try_current=0; + else + gcc_profile="${JAVA_PKG_NATIVE_GCJ_PROFILE}" + if (java-pkg_native-check-gcj "${gcc_profile}" "${any}"); then + echo "${gcc_profile}"; + return 0; + fi + fi + fi + + # Next check current gcc-config profile + if [[ ${try_current} == "1" ]]; then + gcc_profile="$(gcc-config -c)" + if (java-pkg_native-check-gcj "${gcc_profile}" "${any}"); then + echo "${gcc_profile}"; + return 0; + fi + fi + # Now check all gcc-config profiles + for gcc_profile in $(ls -1r /etc/env.d/gcc/${CHOST}-*); do + if (java-pkg_native-check-gcj "${gcc_profile}" "${any}"); then + echo "${gcc_profile}"; + return 0; + fi + done + + # If we get here and no matches have occurred (yet), try any, or + # fail if "any" flag already set + if [[ "${any}" == "1" ]]; then + return 1; + else + gcc_profile="$(java-pkg_native-find-supported-gcj 1)" + if [[ "${?}" == "0" ]]; then + echo ${gcc_profile} + return 0; + fi + fi + + # if we get here all has failed (no gcj available) + return 1; +} + +# ------------------------------------------------------------------------------ +# @internal-function java-pkg_native-select-current-jdk-gcj +# +# Attempt to use the current gcj-jdk as the target for native builds +# +# ------------------------------------------------------------------------------ +java-pkg_native-select-current-jdk-gcj() { + 
local selected_jdk=$(java-config -f) + local gcc_profile=$(gcc-config -c) + local gcc_profile_ + [[ $(echo "$(java-config -f)" | grep 'gcj-jdk') ]] || return 1 + + if [[ "$(java-pkg_native-tc-abi ${gcc_profile})" != \ + "$(java-pkg_native-jdk-abi ${selected_jdk})" ]]; then + gcc_profile_=${gcc_profile} + gcc_profile=${CHOST}-$(java-config -g VERSION | \ + sed -r -e 's:GCJ (.*[0-9]).*$:\1:g' -e 's/_/-/g') + if (java-pkg_native-check-gcj ${gcc_profile} 1); then + # The selected gcj-jdk uses a different ABI version + # than the currently selected gcc + echo "${gcc_profile}"; + return 0; + else + ewarn "The corresponding gcc/gcj profile for the" + ewarn "selected gcj-jdk seems to be missing!" + return 1; + fi + fi + echo "${gcc_profile}"; + return 0 +} + +# ------------------------------------------------------------------------------ +# @internal-function java-pkg_native_set-env +# +# Set native build environment for given gcc +# @param $1 - gcc gcj profile +# ------------------------------------------------------------------------------ +java-pkg_native-set-env() { + # TODO: check cross compiling is working... 
+ local gcj_java_home gcc_branch_ver gcc_config_ver gcc_slot + local gcc_profile="${1}" + + if !(java-pkg_native-check-gcj ${gcc_profile} 1); then + ewarn "There is a problem with the selected gcj profile" + return 1; + fi + local libgcj_abi_version="$(java-pkg_native-tc-abi ${gcc_profile})" + local gccbin="$(gcc-config -B ${gcc_profile})" + local gcj_target="$($gccbin/gcj -dumpmachine)" + [[ -z gcc_target ]] && die "gcj binary missing for ${gcc_profile}" + + # Construct the gcj-jdk JAVA_HOME in the same way as the gcj-jdk + # ebuild just in case it's not installed yet + gcc_branch_ver="$(get_version_component_range 1-2 ${gcc_profile/$gcj_target-})" + gcc_config_ver="${gcc_config_ver:-$(replace_version_separator 3 '-' ${gcc_profile/$gcj_target-})}" + if use multislot ; then + gcc_slot="${gcj_target}-${gcc_config_ver}" + else + gcc_slot="${gcc_branch_ver}" + fi + gcj_java_home="/usr/$(get_libdir)/gcj-jdk-${gcc_slot}-${libgcj_abi_version}" + [[ -d "${gcj_java_home}" ]] || die "dev-java/gcj-jdk-${gcc_config_ver} needs to be installed to compile native packages with gcj" + JAVA_PKG_NATIVE_BIN_FLAGS="-Wl,-rpath ${gcj_java_home}/lib/${ABI} -Wl,-Bsymbolic -findirect-dispatch -fjni" + + export GCJ="${gccbin}/gcj" + export DBTOOL="${gccbin}/gcj-dbtool" + + return 0 +} + +# ------------------------------------------------------------------------------ +# @internal-function java-pkg_native_init_ +# +# Check for issues +# +# @return 0 - procede with native build +# @return 1 - skip native build +# ------------------------------------------------------------------------------ +java-pkg_native_init_() { + local skip_cflags="${1}" + if ! has gcj "${IUSE}" || ! 
use gcj ; then + return 1 + fi + + # The libgcj ABI has a habbit of changing incompatibly between + # versions, if the current java-jdk is set to gcj-jdk, compare the + # ABI version with the current gcc libgcj ABI version, if it differs + # use the gcj matching the java-jdk version, otherwise next use the + # highest version installed gcc first with gcj-jdk support, then + # without. + + # Is the current system VM gcj-jdk? Is GCJ available? + # If so, use the gcc-config profile for that gcj-jdk + #einfo "Checking current java-config system VM profile ..." + gcc_profile=$(java-pkg_native-select-current-jdk-gcj) + # If not, find a suitable version + if [[ "${?}" != "0" ]]; then + #einfo "Unable to match a gcc-config profile to current java-config system VM" + #einfo "Attempting to determine suitable gcc-config profile for this system ..." + gcc_profile=$(java-pkg_native-find-supported-gcj) + else + einfo "System Java VM is gcj-jdk. Using selected profile." + fi + if [[ "${?}" != "0" ]]; then + # Do we want to die here? + ewarn "java native tools unusable!" + return 1 + fi + + einfo "Using gcc-config profile: ${gcc_profile} to build native pkg ..." + + java-pkg_native-set-env ${gcc_profile} + + return 0 +} + +# ------------------------------------------------------------------------------ +# @ebuild-function java-pkg_gen-native-cp +# +# Set include and library paths for native build. +# +# Example: +# java-pkg_gen-native-cp junit gnu-crypto ... +# +# @param $@ - space-separated list of packages +# ------------------------------------------------------------------------------ +java-pkg_gen-native-cp() { + java-pkg_native_init_ || return 0 + + java-pkg_gcjflags + + local pkg cp item lib + for pkg in ${@} ; do + cp="$(java-config --classpath=${pkg})" + for item in ${cp//:/ } ; do + if [[ ( -f "${item}" ) && ( ".jar" == "${item: -4:4}" ) ]] ; then + lib="$(dirname ${item})/lib$(basename ${item}).so" + [ ! -f "${lib}" ] && die "Native library ${lib} from ${pkg} missing!" 
+ JAVA_PKG_NATIVE_INC="${JAVA_PKG_NATIVE_INC} -I${item}" + JAVA_PKG_NATIVE_LIB="${JAVA_PKG_NATIVE_LIB} ${lib}" + fi + done + done +} + +# ------------------------------------------------------------------------------ +# @ebuild-function java-pkg_donative +# +# Compile Java source to native. +# +# Example: +# java-pkg_donative src/java/* ... +# Where '*' is org or com et cetera +# +# @param $@ - path to java source(s) +# ------------------------------------------------------------------------------ +java-pkg_donative() { + java-pkg_native_init_ || return 0 + + java-pkg_gcjflags + + einfo "Compile Java source to native ..." + + local buildpath="${S}/build/native" + mkdir -p "${buildpath}" + + local path + for path in ${@} ; do + cp -a "${path}" "${buildpath}" + done + + pushd "${buildpath}" >/dev/null || die "no native build there!" + + local file + for file in $(find -type f -name '*.java' | cut -c3-) ; do + echo ${GCJ} -c -g0 ${OPTIMIZE_CFLAGS} -Wno-deprecated \ + ${JAVA_PKG_NATIVE_INC} ${file} -o ${file/\.java/.o} + ${GCJ} -c -g0 ${OPTIMIZE_CFLAGS} -Wno-deprecated \ + ${JAVA_PKG_NATIVE_INC} ${file} -o ${file/\.java/.o} \ + || die "java native compile failed! (${file})" + done + + # Any other resource formats out there? + # .properties, .rsc, .xml + + for file in $(find -type f \( -name '*.properties' -o -name '*.rsc' -o -name '*.xml' \) | cut -c3-) ; do + echo ${GCJ} -c -g0 ${OPTIMIZE_CFLAGS} -Wno-deprecated \ + --resource ${file} ${file} -o ${file}.o + ${GCJ} -c -g0 ${OPTIMIZE_CFLAGS} -Wno-deprecated \ + --resource ${file} ${file} -o ${file}.o \ + || die "java native compile failed! (${file})" + done + + popd >/dev/null +} + +# ------------------------------------------------------------------------------ +# @ebuild-function java-pkg_donative-bin +# +# Create native binary. 
+#
+# Example:
+#	java-pkg_donative-bin com.example.my.Main
+#	java-pkg_donative-bin com.example.my.Main myapp.jar
+#
+# @param $1 - main function to call on execution of the native binary
+# @param $2 - optional: the name of resulting binary, or the
+#	path to jar file to turn native
+# ------------------------------------------------------------------------------
+java-pkg_donative-bin() {
+	# Silently do nothing when the gcj native machinery is unavailable
+	# or disabled for this build.
+	java-pkg_native_init_ || return 0
+
+	java-pkg_gcjflags
+
+	[ -z "${1}" ] && die "set the main function to call for the binary!"
+
+	# Two modes, selected on whether $2 ends in ".jar":
+	#   jar mode    - link the native binary directly from the jar file
+	#   source mode - link from the objects produced by java-pkg_donative
+	if [ ".jar" == "${2: -4:4}" ] ; then
+		pushd "${S}" >/dev/null
+
+		# ${2/\.jar} strips the ".jar" suffix to form the output name.
+		# The command line is echoed first so the build log shows it.
+		echo ${GCJ} --main=${1} -o ${2/\.jar} ${2} \
+			${JAVA_PKG_NATIVE_BIN_FLAGS} ${CFLAGS} ${LDFLAGS} \
+			${JAVA_PKG_NATIVE_INC} ${JAVA_PKG_NATIVE_LIB}
+		${GCJ} --main=${1} -o ${2/\.jar} ${2} \
+			${JAVA_PKG_NATIVE_BIN_FLAGS} ${CFLAGS} ${LDFLAGS} \
+			${JAVA_PKG_NATIVE_INC} ${JAVA_PKG_NATIVE_LIB} \
+			|| die "build of native binary failed! (from jar)"
+	else
+		pushd "${S}/build/native" >/dev/null || die "no native build there!"
+
+		# Collect every object file produced by java-pkg_donative.
+		local file files
+		for file in $(find -type f -name '*.o' | cut -c3-) ; do
+			files="${files} ${file}"
+		done
+
+		# Output name: explicit $2, else ${PN}-native[-${SLOT}].
+		local bin=""
+		if [ -n "${2}" ] ; then
+			bin="${2}"
+		elif [[ ( -n "${SLOT}" ) && ( "${SLOT}" != "0" ) ]] ; then
+			bin="${PN}-native-${SLOT}"
+		else
+			bin="${PN}-native"
+		fi
+
+		# NOTE(review): the echoed preview ends in a literal "..." while
+		# the real link command below passes ${files}; cosmetic only, but
+		# the log never shows the actual object list.
+		echo ${GCJ} ${JAVA_PKG_NATIVE_BIN_FLAGS} \
+			--main=${1} -o ../${bin} ${LDFLAGS} \
+			${JAVA_PKG_NATIVE_LIB} ...
+		${GCJ} ${JAVA_PKG_NATIVE_BIN_FLAGS} \
+			--main=${1} -o ../${bin} ${LDFLAGS} \
+			${JAVA_PKG_NATIVE_LIB} ${files} \
+			|| die "build of native binary failed! (from source)"
+	fi
+
+	popd >/dev/null
+}
+
+# ------------------------------------------------------------------------------
+# @ebuild-function java-pkg_skip-cachejar
+#
+# Skip caching of Jar files that match pattern.
+#
+# Example:
+#	java-pkg_skip-cachejar org.eclipse.jdt.core_ org.eclipse.jdt.apt
+#	java-pkg_skip-cachejar 2000 org.eclipse.jdt.ui_
+#
+# param $1 - optional: memory size to check
+# param $@ - pattern of Jar files to skip
+# ------------------------------------------------------------------------------
+java-pkg_skip-cachejar() {
+	# Silently do nothing when the gcj native machinery is unavailable
+	# or disabled for this build.
+	java-pkg_native_init_ || return 0
+
+	java-pkg_gcjflags
+
+	# A purely numeric first argument is treated as a memory requirement
+	# (unit defined by check_reqs_conditional -- presumably MiB, TODO
+	# confirm). If the requirement IS met, return without recording any
+	# patterns, i.e. the listed jars will be cached after all; only on
+	# insufficient memory are the remaining arguments added as skips.
+	if [[ ${1} =~ ^[0-9]+$ ]] ; then
+		CHECKREQS_MEMORY="${1}"
+		check_reqs_conditional && return 0
+		shift
+	fi
+
+	# Accumulated patterns are consumed later by java-pkg_cachejar_.
+	JAVA_PKG_NATIVE_SKIP="${JAVA_PKG_NATIVE_SKIP} ${@}"
+}
+
+# ------------------------------------------------------------------------------
+# @ebuild-function java-pkg_cachejar
+#
+# Create native library from jar.
+# For packages not using java-pkg_dojar.
+#
+# Example:
+#	use gcj && java-pkg_cachejar
+#
+# @param $@ - none
+# ------------------------------------------------------------------------------
+java-pkg_cachejar() {
+	# Silently do nothing when the gcj native machinery is unavailable
+	# or disabled for this build.
+	java-pkg_native_init_ || return 0
+
+	java-pkg_gcjflags
+
+	# Operates on the image directory, hence only valid in src_install.
+	pushd "${D}" >/dev/null || die "This function is for src_install!"
+ + local jars jar + for jar in $(find -type f -name '*.jar' | cut -c2-) ; do + jars="${jars} ${jar}" + done + + java-pkg_cachejar_ "${jars}" + + popd >/dev/null +} + +# ------------------------------------------------------------------------------ +# @internal-function java-pkg_cachejar_ +# +# Create native library from jar +# ------------------------------------------------------------------------------ +java-pkg_cachejar_() { + java-pkg_native_init_ || return 0 + + java-pkg_gcjflags + + local jars + [ ${#} -lt 1 ] \ + && jars="${JAVA_PKG_CLASSPATH//:/ }" \ + || jars="${@}" + + local item jar to + for jar in ${jars} ; do + for item in ${JAVA_PKG_NATIVE_SKIP} ; do + if [[ ${jar} =~ ${item} ]] ; then + ewarn "skip: ${jar}" + jar="no_native_lib" + break + fi + done + + to="$(dirname ${jar})/${ABI}/lib$(basename ${jar}).so" + mkdir -p "${D}$(dirname ${to})" + if [[ ( -f "${D}${jar}" ) && ( ".jar" == "${jar: -4:4}" ) && ( ! -e "${D}${to}" ) ]] ; then + echo ${GCJ} ${JAVA_PKG_NATIVE_CACHE_FLAGS} \ + -g0 ${CFLAGS} -o ${to} ${jar} + if ! ${GCJ} ${JAVA_PKG_NATIVE_CACHE_FLAGS} \ + -g0 ${CFLAGS} -o ${D}${to} ${D}${jar} ; then + ewarn "${to} build failed -> skipping" + fi + fi + done +} +# ------------------------------------------------------------------------------ +# @internal-function java-pkg_do_reg-cachejar_ +# +# Register native library for each ABI +# ------------------------------------------------------------------------------ +java-pkg_do_reg-cachejar_() { + # Create new database? + if [ ! -e "${JAVA_PKG_NATIVE_CLASSMAP}" ] ; then + einfo "Create new database ..." + [ -d "$(dirname ${JAVA_PKG_NATIVE_CLASSMAP})" ] \ + || mkdir -p "$(dirname ${JAVA_PKG_NATIVE_CLASSMAP})" + ${DBTOOL} -n ${JAVA_PKG_NATIVE_CLASSMAP} + fi + + einfo "Register native library in database (${JAVA_PKG_NATIVE_CLASSMAP}) ..." 
+ + local jar to + for jar in ${JAVA_PKG_CLASSPATH//:/ } ; do + to="$(dirname ${jar})/${ABI}/lib$(basename ${jar}).so" + if [[ ( -f "${jar}" ) && ( ".jar" == "${jar: -4:4}" ) && ( -f "${to}" ) ]] ; then + einfo "library: ${to}" + ${DBTOOL} -a ${JAVA_PKG_NATIVE_CLASSMAP} ${jar} ${to} \ + || die "failed to register jar file" + fi + done +} + +# ------------------------------------------------------------------------------ +# @internal-function java-pkg_reg-cachejar_ +# +# Register native library +# ------------------------------------------------------------------------------ +java-pkg_reg-cachejar_() { + java-pkg_native_init_ || return 0 + + java-pkg_gcjflags + + [ -z "${JAVA_PKG_CLASSPATH}" ] && return 0 + + # For each ABI: + local abilist="" + local libgcj_abi_version="$(java-pkg_native-tc-abi ${gcc_profile})" + if has_multilib_profile ; then + abilist=$(get_install_abis) + fi + if [[ -n ${abilist} ]] ; then + OABI=${ABI} + for ABI in ${abilist} ; do + export ABI + # The ABI for libgcj must match the native binary/library + JAVA_PKG_NATIVE_CLASSMAP="/usr/share/java/gcj/${ABI}/${libgcj_abi_version}/classmap.db" + java-pkg_do_reg-cachejar_ + done + ABI=${OABI} + unset OABI + return 0 + else + JAVA_PKG_NATIVE_CLASSMAP="/usr/share/java/gcj/${libgcj_abi_version}/classmap.db" + java-pkg_do_reg-cachejar_ + fi + +} + +# ------------------------------------------------------------------------------ +# @internal-function java-pkg_die +# +# Enhanced die for Java packages, which displays some information that may be +# useful for debugging bugs on bugzilla. +# ------------------------------------------------------------------------------ +#register_die_hook java-pkg_die +if ! hasq java-pkg_die ${EBUILD_DEATH_HOOKS}; then + EBUILD_DEATH_HOOKS="${EBUILD_DEATH_HOOKS} java-pkg_die" +fi + +java-pkg_die() { + echo "!!! 
When you file a bug report, please include the following information:" >&2 + echo "GENTOO_VM=${GENTOO_VM} CLASSPATH=\"${CLASSPATH}\" JAVA_HOME=\"${JAVA_HOME}\"" >&2 + echo "JAVACFLAGS=\"${JAVACFLAGS}\" COMPILER=\"${GENTOO_COMPILER}\"" >&2 + echo "and of course, the output of emerge --info" >&2 +} + + +# TODO document +# List jars in the source directory, ${S} +java-pkg_jar-list() { + if [[ -n "${JAVA_PKG_DEBUG}" ]]; then + einfo "Linked Jars" + find "${S}" -type l -name '*.jar' -print0 | xargs -0 -r -n 500 ls -ald | sed -e "s,${WORKDIR},\${WORKDIR}," + einfo "Jars" + find "${S}" -type f -name '*.jar' -print0 | xargs -0 -r -n 500 ls -ald | sed -e "s,${WORKDIR},\${WORKDIR}," + einfo "Classes" + find "${S}" -type f -name '*.class' -print0 | xargs -0 -r -n 500 ls -ald | sed -e "s,${WORKDIR},\${WORKDIR}," + fi +} + +# ------------------------------------------------------------------------------ +# @internal-function java-pkg_verify-classes +# +# Verify that the classes were compiled for the right source / target. Dies if +# not. +# @param $1 (optional) - the file to check, otherwise checks whole ${D} +# ------------------------------------------------------------------------------ +java-pkg_verify-classes() { + #$(find ${D} -type f -name '*.jar' -o -name '*.class') + + local version_verify="/usr/bin/class-version-verify.py" + + if [[ ! -x "${version_verify}" ]]; then + version_verify="/usr/$(get_libdir)/javatoolkit/bin/class-version-verify.py" + fi + + if [[ ! -x "${version_verify}" ]]; then + ewarn "Unable to perform class version checks as" + ewarn "class-version-verify.py is unavailable" + ewarn "Please install dev-java/javatoolkit." + return + fi + + local target=$(java-pkg_get-target) + local result + local log="${T}/class-version-verify.log" + if [[ -n "${1}" ]]; then + ${version_verify} -v -t ${target} "${1}" > "${log}" + result=$? 
+ else + ebegin "Verifying java class versions (target: ${target})" + ${version_verify} -v -t ${target} -r "${D}" > "${log}" + result=$? + eend ${result} + fi + [[ -n ${JAVA_PKG_DEBUG} ]] && cat "${log}" + if [[ ${result} != 0 ]]; then + eerror "Incorrect bytecode version found" + [[ -n "${1}" ]] && eerror "in file: ${1}" + eerror "See ${log} for more details." + die "Incorrect bytecode found" + fi +} + +# ---------------------------------------------------------------------------- +# @internal-function java-pkg_ensure-dep +# Check that a package being used in jarfrom, getjars and getjar is contained +# within DEPEND or RDEPEND. +# @param $1 - empty - check both vars; "runtime" or "build" - check only +# RDEPEND, resp. DEPEND +# @param $2 - Package name and slot. + +java-pkg_ensure-dep() { + debug-print-function ${FUNCNAME} $* + + local limit_to="${1}" + local target_pkg="${2}" + local dev_error="" + + # remove the version specification, which may include globbing (* and [123]) + local stripped_pkg=$(echo "${target_pkg}" | sed \ + 's/-\([0-9*]*\(\[[0-9]*\]\)*\)*\(\.\([0-9*]*\(\[[0-9]*\]\)*\)*\)*$//') + + debug-print "Matching against: ${stripped_pkg}" + + if [[ ${limit_to} != runtime && ! ( "${DEPEND}" =~ "$stripped_pkg" ) ]]; then + dev_error="The ebuild is attempting to use ${target_pkg} that is not" + dev_error="${dev_error} declared in DEPEND." + if is-java-strict; then + eerror "${dev_error}" + die "${dev_error}" + elif [[ ${BASH_SUBSHELL} = 0 ]]; then + eerror "${dev_error}" + elog "Because you have this package installed the package will" + elog "build without problems, but please report this to" + elog "http://bugs.gentoo.org" + fi + fi + + if [[ ${limit_to} != build ]]; then + if [[ ! ( ${RDEPEND} =~ "${stripped_pkg}" ) ]]; then + if [[ ! 
( ${PDEPEND} =~ "${stripped_pkg}" ) ]]; then + dev_error="The ebuild is attempting to use ${target_pkg}," + dev_error="${dev_error} without specifying --build-only, that is not declared in RDEPEND" + dev_error="${dev_error} or PDEPEND." + if is-java-strict; then + eerror "${dev_error}" + die "${dev_error}" + elif [[ ${BASH_SUBSHELL} = 0 ]]; then + eerror "${dev_error}" + elog "The package will build without problems, but may fail to run" + elog "if you don't have ${target_pkg} installed, so please report" + elog "this to http://bugs.gentoo.org" + fi + fi + fi + fi +} + +# ------------------------------------------------------------------------------ +# @section-end internal +# ------------------------------------------------------------------------------ + +java-pkg_check-phase() { + local phase=${1} + local funcname=${FUNCNAME[1]} + if [[ ${EBUILD_PHASE} != ${phase} ]]; then + local msg="${funcname} used outside of src_${phase}" + java-pkg_announce-qa-violation "${msg}" + fi +} + +java-pkg_check-versioned-jar() { + local jar=${1} + + if [[ ${jar} =~ ${PV} ]]; then + java-pkg_announce-qa-violation "installing versioned jar '${jar}'" + fi +} + +java-pkg_check-jikes() { + if hasq jikes ${IUSE}; then + java-pkg_announce-qa-violation "deprecated USE flag 'jikes' in IUSE" + fi +} + +java-pkg_announce-qa-violation() { + local nodie + if [[ ${1} == "--nodie" ]]; then + nodie="true" + shift + fi + echo "Java QA Notice: $@" >&2 + increment-qa-violations + [[ -z "${nodie}" ]] && is-java-strict && die "${@}" +} + +increment-qa-violations() { + let "JAVA_PKG_QA_VIOLATIONS+=1" + export JAVA_PKG_QA_VIOLATIONS +} + +is-java-strict() { + [[ -n ${JAVA_PKG_STRICT} ]] + return $? 
+} + + +# ------------------------------------------------------------------------------ +# @eclass-end +# ------------------------------------------------------------------------------ diff --git a/eclass/linux-mod.eclass b/eclass/linux-mod.eclass new file mode 100644 index 0000000..c07e95f --- /dev/null +++ b/eclass/linux-mod.eclass @@ -0,0 +1,813 @@ +# Copyright 1999-2004 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 +# $Header: /var/cvsroot/gentoo-x86/eclass/linux-mod.eclass,v 1.99 2010/03/31 19:33:16 robbat2 Exp $ + +# Author(s): John Mylchreest , +# Stefan Schweizer +# Maintainer: kernel-misc@gentoo.org +# +# Please direct your bugs to the current eclass maintainer :) + +# @ECLASS: linux-mod.eclass +# @MAINTAINER: +# kernel-misc@gentoo.org +# @BLURB: It provides the functionality required to install external modules against a kernel source tree. +# @DESCRIPTION: +# This eclass is used to interface with linux-info.eclass in such a way +# to provide the functionality and initial functions +# required to install external modules against a kernel source +# tree. + +# A Couple of env vars are available to effect usage of this eclass +# These are as follows: + +# @ECLASS-VARIABLE: KERNEL_DIR +# @DESCRIPTION: +# A string containing the directory of the target kernel sources. The default value is +# "/usr/src/linux" + +# @ECLASS-VARIABLE: ECONF_PARAMS +# @DESCRIPTION: +# It's a string containing the parameters to pass to econf. +# If this is not set, then econf isn't run. + +# @ECLASS-VARIABLE: BUILD_PARAMS +# @DESCRIPTION: +# It's a string with the parameters to pass to emake. + +# @ECLASS-VARIABLE: BUILD_TARGETS +# @DESCRIPTION: +# It's a string with the build targets to pass to make. The default value is "clean modules" + +# @ECLASS-VARIABLE: MODULE_NAMES +# @DESCRIPTION: +# It's a string containing the modules to be built automatically using the default +# src_compile/src_install. 
It will only make ${BUILD_TARGETS} once in any directory. +# +# The structure of each MODULE_NAMES entry is as follows: +# +# modulename(libdir:srcdir:objdir) +# +# where: +# +# modulename = name of the module file excluding the .ko +# libdir = place in system modules directory where module is installed (by default it's misc) +# srcdir = place for ebuild to cd to before running make (by default it's ${S}) +# objdir = place the .ko and objects are located after make runs (by default it's set to srcdir) +# +# To get an idea of how these variables are used, here's a few lines +# of code from around line 540 in this eclass: +# +# einfo "Installing ${modulename} module" +# cd ${objdir} || die "${objdir} does not exist" +# insinto /lib/modules/${KV_FULL}/${libdir} +# doins ${modulename}.${KV_OBJ} || die "doins ${modulename}.${KV_OBJ} failed" +# +# For example: +# MODULE_NAMES="module_pci(pci:${S}/pci:${S}) module_usb(usb:${S}/usb:${S})" +# +# what this would do is +# +# cd "${S}"/pci +# make ${BUILD_PARAMS} ${BUILD_TARGETS} +# cd "${S}" +# insinto /lib/modules/${KV_FULL}/pci +# doins module_pci.${KV_OBJ} +# +# cd "${S}"/usb +# make ${BUILD_PARAMS} ${BUILD_TARGETS} +# cd "${S}" +# insinto /lib/modules/${KV_FULL}/usb +# doins module_usb.${KV_OBJ} + +# There is also support for automated modprobe.d/modules.d(2.4) file generation. +# This can be explicitly enabled by setting any of the following variables. + +# @ECLASS-VARIABLE: MODULESD__ENABLED +# @DESCRIPTION: +# This is used to disable the modprobe.d/modules.d file generation otherwise the file will be +# always generated (unless no MODULESD__* variable is provided). Set to "no" to disable +# the generation of the file and the installation of the documentation. + +# @ECLASS-VARIABLE: MODULESD__EXAMPLES +# @DESCRIPTION: +# This is a bash array containing a list of examples which should +# be used. If you want us to try and take a guess set this to "guess". 
+# +# For each array_component it's added an options line in the modprobe.d/modules.d file +# +# options array_component +# +# where array_component is " options" (see modprobe.conf(5)) + +# @ECLASS-VARIABLE: MODULESD__ALIASES +# @DESCRIPTION: +# This is a bash array containing a list of associated aliases. +# +# For each array_component it's added an alias line in the modprobe.d/modules.d file +# +# alias array_component +# +# where array_component is "wildcard " (see modprobe.conf(5)) + +# @ECLASS-VARIABLE: MODULESD__ADDITIONS +# @DESCRIPTION: +# This is a bash array containing a list of additional things to +# add to the bottom of the file. This can be absolutely anything. +# Each entry is a new line. + +# @ECLASS-VARIABLE: MODULESD__DOCS +# @DESCRIPTION: +# This is a string list which contains the full path to any associated +# documents for . These files are installed in the live tree. + +# @ECLASS-VARIABLE: KV_OBJ +# @DESCRIPTION: +# It's a read-only variable. It contains the extension of the kernel modules. + +# The order of these is important as both of linux-info and eutils contain +# set_arch_to_kernel and set_arch_to_portage functions and the ones in eutils +# are deprecated in favor of the ones in linux-info. +# See http://bugs.gentoo.org/show_bug.cgi?id=127506 + +inherit eutils linux-info multilib +case "${EAPI:-0}" in + 2) + EXPORT_FUNCTIONS pkg_setup pkg_preinst pkg_postinst src_install src_configure src_compile pkg_postrm + ;; + *) + EXPORT_FUNCTIONS pkg_setup pkg_preinst pkg_postinst src_install src_compile pkg_postrm + ;; +esac +IUSE="kernel_linux" +SLOT="0" +DESCRIPTION="Based on the $ECLASS eclass" +RDEPEND="kernel_linux? ( virtual/modutils )" +DEPEND="${RDEPEND} + sys-apps/sed + kernel_linux? 
( virtual/linux-sources )"
+
+# eclass utilities
+# ----------------------------------
+
+# internal function
+#
+# FUNCTION: check_vermagic
+# DESCRIPTION:
+# Compare the gcc version and CHOST used to build the installed kernel
+# (parsed out of a *.o.cmd file in the kernel build tree) against the
+# currently active gcc, and die on a mismatch to avoid vermagic problems.
+check_vermagic() {
+	debug-print-function ${FUNCNAME} $*
+
+	local curr_gcc_ver=$(gcc -dumpversion)
+	local tmpfile old_chost old_gcc_ver result=0
+
+	# Grab one line containing ".../usr/lib/gcc/<CHOST>/<ver>/include/..."
+	# from any kernel object command file; -quit stops find at the first hit.
+	tmpfile=`find "${KV_DIR}/" -iname "*.o.cmd" -exec grep usr/lib/gcc {} \; -quit`
+	# Successive parameter expansions whittle the match down to
+	# "<CHOST>/<ver>" and then split it into chost and version.
+	# NOTE(review): this relies on the toolchain living under /usr/lib/gcc;
+	# verify for cross-compile or non-standard toolchain layouts.
+	tmpfile=${tmpfile//*usr/lib}
+	tmpfile=${tmpfile//\/include*}
+	old_chost=${tmpfile//*gcc\/}
+	old_chost=${old_chost//\/*}
+	old_gcc_ver=${tmpfile//*\/}
+
+	if [[ -z ${old_gcc_ver} || -z ${old_chost} ]]; then
+		# Parsing failed: warn but do not block the build.
+		ewarn ""
+		ewarn "Unable to detect what version of GCC was used to compile"
+		ewarn "the kernel. Build will continue, but you may experience problems."
+	elif [[ ${curr_gcc_ver} != ${old_gcc_ver} ]]; then
+		ewarn ""
+		ewarn "The version of GCC you are using (${curr_gcc_ver}) does"
+		ewarn "not match the version of GCC used to compile the"
+		ewarn "kernel (${old_gcc_ver})."
+		result=1
+	elif [[ ${CHOST} != ${old_chost} ]]; then
+		ewarn ""
+		ewarn "The current CHOST (${CHOST}) does not match the chost"
+		ewarn "used when compiling the kernel (${old_chost})."
+		result=1
+	fi
+
+	# Any confirmed mismatch is fatal.
+	if [[ ${result} -gt 0 ]]; then
+		ewarn ""
+		ewarn "Build will not continue, because you will experience problems."
+		ewarn "To fix this either change the version of GCC you wish to use"
+		ewarn "to match the kernel, or recompile the kernel first."
+		die "GCC Version Mismatch."
+	fi
+}
+
+# @FUNCTION: use_m
+# @RETURN: true or false
+# @DESCRIPTION:
+# It checks if the kernel version is greater than 2.6.5.
+# NOTE(review): the test requires KV_MAJOR -eq 2, so any non-2.x kernel
+# would return false and fall back to SUBDIRS= -- confirm this is intended
+# for the kernel versions this eclass supports.
+use_m() {
+	debug-print-function ${FUNCNAME} $*
+
+	# if we haven't determined the version yet, we need to.
+	get_version;
+
+	# if the kernel version is greater than 2.6.6 then we should use
+	# M= instead of SUBDIRS=
+	[ ${KV_MAJOR} -eq 2 -a ${KV_MINOR} -gt 5 -a ${KV_PATCH} -gt 5 ] && \
+		return 0 || return 1
+}
+
+# @FUNCTION: convert_to_m
+# @USAGE: /path/to/the/file
+# @DESCRIPTION:
+# It converts a file (e.g.
a makefile) to use M= instead of SUBDIRS=
+convert_to_m() {
+	debug-print-function ${FUNCNAME} $*
+
+	# Only needed for kernels where M= replaced SUBDIRS= (see use_m).
+	if use_m
+	then
+		[ ! -f "${1}" ] && \
+			die "convert_to_m() requires a filename as an argument"
+		ebegin "Converting ${1/${WORKDIR}\//} to use M= instead of SUBDIRS="
+		sed -i 's:SUBDIRS=:M=:g' "${1}"
+		eend $?
+	fi
+}
+
+# internal function
+#
+# FUNCTION: update_depmod
+# DESCRIPTION:
+# It updates the modules.dep file for the current kernel.
+update_depmod() {
+	debug-print-function ${FUNCNAME} $*
+
+	# if we haven't determined the version yet, we need to.
+	get_version;
+
+	ebegin "Updating module dependencies for ${KV_FULL}"
+	# System.map is required so depmod can resolve symbol versions.
+	if [ -r "${KV_OUT_DIR}"/System.map ]
+	then
+		depmod -ae -F "${KV_OUT_DIR}"/System.map -b "${ROOT}" -r ${KV_FULL}
+		eend $?
+	else
+		ewarn
+		ewarn "${KV_OUT_DIR}/System.map not found."
+		ewarn "You must manually update the kernel module dependencies using depmod."
+		eend 1
+		ewarn
+	fi
+}
+
+# internal function
+#
+# FUNCTION: update_modules
+# DESCRIPTION:
+# It calls the update-modules utility.
+# NOTE(review): the elif branch below is byte-identical to the if branch
+# (same condition, same body), so it can never run -- presumably one of
+# the two was meant to test "${D}"/etc/modprobe.d/* instead. Confirm
+# against upstream linux-mod.eclass history before changing.
+update_modules() {
+	debug-print-function ${FUNCNAME} $*
+
+	# Run update-modules only when the package actually installed
+	# non-empty, non-comment configuration under /etc/modules.d.
+	if [ -x /sbin/update-modules ] && \
+		grep -v -e "^#" -e "^$" "${D}"/etc/modules.d/* >/dev/null 2>&1; then
+		ebegin "Updating modules.conf"
+		/sbin/update-modules
+		eend $?
+	elif [ -x /sbin/update-modules ] && \
+		grep -v -e "^#" -e "^$" "${D}"/etc/modules.d/* >/dev/null 2>&1; then
+		ebegin "Updating modules.conf"
+		/sbin/update-modules
+		eend $?
+	fi
+}
+
+# internal function
+#
+# FUNCTION: move_old_moduledb
+# DESCRIPTION:
+# It updates the location of the database used by the module-rebuild utility.
+move_old_moduledb() {
+	debug-print-function ${FUNCNAME} $*
+
+	local OLDDIR="${ROOT}"/usr/share/module-rebuild/
+	local NEWDIR="${ROOT}"/var/lib/module-rebuild/
+
+	# Migrate the database from the legacy path, then remove the old dir.
+	if [[ -f "${OLDDIR}"/moduledb ]]; then
+		[[ ! -d "${NEWDIR}" ]] && mkdir -p "${NEWDIR}"
+		[[ !
-f "${NEWDIR}"/moduledb ]] && \
+		mv "${OLDDIR}"/moduledb "${NEWDIR}"/moduledb
+		rm -f "${OLDDIR}"/*
+		rmdir "${OLDDIR}"
+	fi
+}
+
+# internal function
+#
+# FUNCTION: update_moduledb
+# DESCRIPTION:
+# It adds the package to the /var/lib/module-rebuild/moduledb database used by the module-rebuild utility.
+update_moduledb() {
+	debug-print-function ${FUNCNAME} $*
+
+	local MODULEDB_DIR="${ROOT}"/var/lib/module-rebuild/
+	# Migrate any database still living at the legacy location first.
+	move_old_moduledb
+
+	# Create an empty database when none exists yet.
+	if [[ ! -f "${MODULEDB_DIR}"/moduledb ]]; then
+		[[ ! -d "${MODULEDB_DIR}" ]] && mkdir -p "${MODULEDB_DIR}"
+		touch "${MODULEDB_DIR}"/moduledb
+	fi
+
+	# Record the package at most once; "a:1:" is the moduledb entry
+	# prefix the module-rebuild tool expects.
+	if ! grep -qs ${CATEGORY}/${PN}-${PVR} "${MODULEDB_DIR}"/moduledb ; then
+		einfo "Adding module to moduledb."
+		echo "a:1:${CATEGORY}/${PN}-${PVR}" >> "${MODULEDB_DIR}"/moduledb
+	fi
+}
+
+# internal function
+#
+# FUNCTION: remove_moduledb
+# DESCRIPTION:
+# It removes the package from the /var/lib/module-rebuild/moduledb database used by
+# the module-rebuild utility.
+remove_moduledb() {
+	debug-print-function ${FUNCNAME} $*
+
+	local MODULEDB_DIR="${ROOT}"/var/lib/module-rebuild/
+	move_old_moduledb
+
+	# Delete every moduledb line mentioning this exact package version.
+	if grep -qs ${CATEGORY}/${PN}-${PVR} "${MODULEDB_DIR}"/moduledb ; then
+		einfo "Removing ${CATEGORY}/${PN}-${PVR} from moduledb."
+		sed -i -e "/.*${CATEGORY}\/${PN}-${PVR}.*/d" "${MODULEDB_DIR}"/moduledb
+	fi
+}
+
+# @FUNCTION: set_kvobj
+# @DESCRIPTION:
+# It sets the KV_OBJ variable.
+set_kvobj() {
+	debug-print-function ${FUNCNAME} $*
+
+	# 2.6+ kernels use the .ko module extension; older kernels use .o.
+	if kernel_is 2 6
+	then
+		KV_OBJ="ko"
+	else
+		KV_OBJ="o"
+	fi
+	# Do we really need to know this?
+	# Let's silence it.
+ # einfo "Using KV_OBJ=${KV_OBJ}" +} + +get-KERNEL_CC() { + debug-print-function ${FUNCNAME} $* + + if [[ -n ${KERNEL_CC} ]] ; then + echo "${KERNEL_CC}" + return + fi + + local kernel_cc + if [ -n "${KERNEL_ABI}" ]; then + # In future, an arch might want to define CC_$ABI + #kernel_cc="$(get_abi_CC)" + #[ -z "${kernel_cc}" ] && + kernel_cc="$(tc-getCC $(ABI=${KERNEL_ABI} get_abi_CHOST))" + else + kernel_cc=$(tc-getCC) + fi + echo "${kernel_cc}" +} + +# internal function +# +# FUNCTION: +# USAGE: /path/to/the/modulename_without_extension +# RETURN: A file in /etc/modules.d/ (kernel < 2.6) or /etc/modprobe.d/ (kernel >= 2.6) +# DESCRIPTION: +# This function will generate and install the neccessary modprobe.d/modules.d file from the +# information contained in the modules exported parms. +# (see the variables MODULESD__ENABLED, MODULESD__EXAMPLES, +# MODULESD__ALIASES, MODULESD__ADDITION and MODULESD__DOCS). +# +# At the end the documentation specified with MODULESD__DOCS is installed. +generate_modulesd() { + debug-print-function ${FUNCNAME} $* + + local currm_path currm currm_t t myIFS myVAR + local module_docs module_enabled module_aliases \ + module_additions module_examples module_modinfo module_opts + + for currm_path in ${@} + do + currm=${currm_path//*\/} + currm=$(echo ${currm} | tr '[:lower:]' '[:upper:]') + currm_t=${currm} + while [[ -z ${currm_t//*-*} ]]; do + currm_t=${currm_t/-/_} + done + + module_docs="$(eval echo \${MODULESD_${currm_t}_DOCS})" + module_enabled="$(eval echo \${MODULESD_${currm_t}_ENABLED})" + module_aliases="$(eval echo \${#MODULESD_${currm_t}_ALIASES[*]})" + module_additions="$(eval echo \${#MODULESD_${currm_t}_ADDITIONS[*]})" + module_examples="$(eval echo \${#MODULESD_${currm_t}_EXAMPLES[*]})" + + [[ ${module_aliases} -eq 0 ]] && unset module_aliases + [[ ${module_additions} -eq 0 ]] && unset module_additions + [[ ${module_examples} -eq 0 ]] && unset module_examples + + # If we specify we dont want it, then lets exit, otherwise 
we assume + # that if its set, we do want it. + [[ ${module_enabled} == no ]] && return 0 + + # unset any unwanted variables. + for t in ${!module_*} + do + [[ -z ${!t} ]] && unset ${t} + done + + [[ -z ${!module_*} ]] && return 0 + + # OK so now if we have got this far, then we know we want to continue + # and generate the modules.d file. + module_modinfo="$(modinfo -p ${currm_path}.${KV_OBJ})" + module_config="${T}/modulesd-${currm}" + + ebegin "Preparing file for modules.d" + #----------------------------------------------------------------------- + echo "# modules.d configuration file for ${currm}" >> "${module_config}" + #----------------------------------------------------------------------- + [[ -n ${module_docs} ]] && \ + echo "# For more information please read:" >> "${module_config}" + for t in ${module_docs} + do + echo "# ${t//*\/}" >> "${module_config}" + done + echo >> "${module_config}" + + #----------------------------------------------------------------------- + if [[ ${module_aliases} -gt 0 ]] + then + echo "# Internal Aliases - Do not edit" >> "${module_config}" + echo "# ------------------------------" >> "${module_config}" + + for((t=0; t<${module_aliases}; t++)) + do + echo "alias $(eval echo \${MODULESD_${currm}_ALIASES[$t]})" \ + >> "${module_config}" + done + echo '' >> "${module_config}" + fi + + #----------------------------------------------------------------------- + if [[ -n ${module_modinfo} ]] + then + echo >> "${module_config}" + echo "# Configurable module parameters" >> "${module_config}" + echo "# ------------------------------" >> "${module_config}" + myIFS="${IFS}" + IFS="$(echo -en "\n\b")" + + for t in ${module_modinfo} + do + myVAR="$(echo ${t#*:} | grep -e " [0-9][ =]" | sed "s:.*\([01][= ]\).*:\1:")" + if [[ -n ${myVAR} ]] + then + module_opts="${module_opts} ${t%%:*}:${myVAR}" + fi + echo -e "# ${t%%:*}:\t${t#*:}" >> "${module_config}" + done + IFS="${myIFS}" + echo '' >> "${module_config}" + fi + + 
#----------------------------------------------------------------------- + if [[ $(eval echo \${MODULESD_${currm}_ALIASES[0]}) == guess ]] + then + # So lets do some guesswork eh? + if [[ -n ${module_opts} ]] + then + echo "# For Example..." >> "${module_config}" + echo "# --------------" >> "${module_config}" + for t in ${module_opts} + do + echo "# options ${currm} ${t//:*}=${t//*:}" >> "${module_config}" + done + echo '' >> "${module_config}" + fi + elif [[ ${module_examples} -gt 0 ]] + then + echo "# For Example..." >> "${module_config}" + echo "# --------------" >> "${module_config}" + for((t=0; t<${module_examples}; t++)) + do + echo "options $(eval echo \${MODULESD_${currm}_EXAMPLES[$t]})" \ + >> "${module_config}" + done + echo '' >> "${module_config}" + fi + + #----------------------------------------------------------------------- + if [[ ${module_additions} -gt 0 ]] + then + for((t=0; t<${module_additions}; t++)) + do + echo "$(eval echo \${MODULESD_${currm}_ADDITIONS[$t]})" \ + >> "${module_config}" + done + echo '' >> "${module_config}" + fi + + #----------------------------------------------------------------------- + + # then we install it + if kernel_is ge 2 6; then + insinto /etc/modprobe.d + else + insinto /etc/modules.d + fi + newins "${module_config}" "${currm_path//*\/}.conf" + + # and install any documentation we might have. + [[ -n ${module_docs} ]] && dodoc ${module_docs} + done + eend 0 + return 0 +} + +# internal function +# +# FUNCTION: find_module_params +# USAGE: A string "NAME(LIBDIR:SRCDIR:OBJDIR)" +# RETURN: The string "modulename:NAME libdir:LIBDIR srcdir:SRCDIR objdir:OBJDIR" +# DESCRIPTION: +# Analyze the specification NAME(LIBDIR:SRCDIR:OBJDIR) of one module as described in MODULE_NAMES. 
find_module_params() {
	debug-print-function ${FUNCNAME} $*

	# NOTE(fix): 'tempvar' was assigned below but only 'temp_var' was declared
	# local, so 'tempvar' leaked into the caller's environment.
	local matched_offset=0 matched_opts=0 test="${@}" tempvar result
	local i=0 y=0 z=0

	# Record the offsets of '(' , ':' and ')' within the specification string.
	for((i=0; i<=${#test}; i++))
	do
		case ${test:${i}:1} in
			\()	matched_offset[0]=${i};;
			\:)	matched_opts=$((${matched_opts} + 1));
				matched_offset[${matched_opts}]="${i}";;
			\))	matched_opts=$((${matched_opts} + 1));
				matched_offset[${matched_opts}]="${i}";;
		esac
	done

	for((i=0; i<=${matched_opts}; i++))
	do
		# i = offset were working on
		# y = last offset
		# z = current offset - last offset
		# tempvar = temporary name
		case ${i} in
			0)	tempvar=${test:0:${matched_offset[0]}};;
			*)	y=$((${matched_offset[$((${i} - 1))]} + 1))
				z=$((${matched_offset[${i}]} - ${matched_offset[$((${i} - 1))]}));
				z=$((${z} - 1))
				tempvar=${test:${y}:${z}};;
		esac

		case ${i} in
			0)	result="${result} modulename:${tempvar}";;
			1)	result="${result} libdir:${tempvar}";;
			2)	result="${result} srcdir:${tempvar}";;
			3)	result="${result} objdir:${tempvar}";;
		esac
	done

	echo ${result}
}

# default ebuild functions
# --------------------------------

# @FUNCTION: linux-mod_pkg_setup
# @DESCRIPTION:
# It checks the CONFIG_CHECK options (see linux-info.eclass(5)), verifies that the kernel is
# configured, verifies that the sources are prepared, verifies that the modules support is builtin
# in the kernel and sets the object extension KV_OBJ.
linux-mod_pkg_setup() {
	debug-print-function ${FUNCNAME} $*

	# If we are installing a binpkg, take a different path.
	if [[ $EMERGE_FROM == binary ]]; then
		linux-mod_pkg_setup_binary
		return
	fi

	linux-info_pkg_setup;
	require_configured_kernel
	check_kernel_built;
	strip_modulenames;
	[[ -n ${MODULE_NAMES} ]] && check_modules_supported
	set_kvobj;
	# Commented out with permission from johnm until a fixed version for arches
	# who intentionally use different kernel and userland compilers can be
	# introduced - Jason Wever , 23 Oct 2005
	#check_vermagic;
}

# @FUNCTION: linux-mod_pkg_setup_binary
# @DESCRIPTION:
# Perform all kernel option checks non-fatally, as the .config and
# /proc/config.gz might not be present. Do not do anything that requires kernel
# sources.
linux-mod_pkg_setup_binary() {
	debug-print-function ${FUNCNAME} $*
	# NOTE(fix): config/optional made local; they previously leaked globals.
	local new_CONFIG_CHECK config optional
	# ~ needs always to be quoted, else bash expands it.
	for config in $CONFIG_CHECK ; do
		optional='~'
		[[ ${config:0:1} == "~" ]] && optional=''
		new_CONFIG_CHECK="${new_CONFIG_CHECK} ${optional}${config}"
	done
	export CONFIG_CHECK="${new_CONFIG_CHECK}"
	linux-info_pkg_setup;
}

# internal function
#
# FUNCTION: strip_modulenames
# DESCRIPTION:
# Removes every entry listed in MODULE_IGNORE from MODULE_NAMES.
strip_modulenames() {
	debug-print-function ${FUNCNAME} $*

	local i
	for i in ${MODULE_IGNORE}; do
		MODULE_NAMES=${MODULE_NAMES//${i}(*}
	done
}

# @FUNCTION: linux-mod_src_configure
# @DESCRIPTION:
# It configures all the modules specified in MODULE_NAMES. For each module the econf command is
# executed only if ECONF_PARAMS is defined, the name of the target is specified by BUILD_TARGETS
# while the options are in BUILD_PARAMS (all the modules share these variables). The compilation
# happens inside ${srcdir}.
#
# Look at the description of these variables for more details.
linux-mod_src_configure() {
	debug-print-function ${FUNCNAME} $*

	local modulename libdir srcdir objdir i n myABI="${ABI}"
	set_arch_to_kernel
	ABI="${KERNEL_ABI}"

	BUILD_TARGETS=${BUILD_TARGETS:-clean module}
	strip_modulenames;
	cd "${S}"
	touch Module.symvers
	for i in ${MODULE_NAMES}
	do
		unset libdir srcdir objdir
		# Split "name(libdir:srcdir:objdir)" into its component variables.
		for n in $(find_module_params ${i})
		do
			eval ${n/:*}=${n/*:/}
		done
		libdir=${libdir:-misc}
		srcdir=${srcdir:-${S}}
		objdir=${objdir:-${srcdir}}

		if [ ! -f "${srcdir}/.configured" ];
		then
			# NOTE(fix): quote ${srcdir}/${OLDPWD} as linux-mod_src_compile
			# already does; unquoted cd breaks on paths containing spaces.
			cd "${srcdir}"
			ln -s "${S}"/Module.symvers Module.symvers
			einfo "Preparing ${modulename} module"
			if [[ -n ${ECONF_PARAMS} ]]
			then
				econf ${ECONF_PARAMS} || \
					die "Unable to run econf ${ECONF_PARAMS}"
			fi

			cd "${OLDPWD}"
			touch "${srcdir}"/.configured
		fi
	done

	set_arch_to_portage
	ABI="${myABI}"
}


# @FUNCTION: linux-mod_src_compile
# @DESCRIPTION:
# It compiles all the modules specified in MODULE_NAMES. For each module the econf command is
# executed only if ECONF_PARAMS is defined, the name of the target is specified by BUILD_TARGETS
# while the options are in BUILD_PARAMS (all the modules share these variables). The compilation
# happens inside ${srcdir}.
#
# Look at the description of these variables for more details.
linux-mod_src_compile() {
	debug-print-function ${FUNCNAME} $*

	local modulename libdir srcdir objdir i n myABI="${ABI}"
	set_arch_to_kernel
	ABI="${KERNEL_ABI}"

	BUILD_TARGETS=${BUILD_TARGETS:-clean module}
	strip_modulenames;
	cd "${S}"
	touch Module.symvers
	for i in ${MODULE_NAMES}
	do
		unset libdir srcdir objdir
		for n in $(find_module_params ${i})
		do
			eval ${n/:*}=${n/*:/}
		done
		libdir=${libdir:-misc}
		srcdir=${srcdir:-${S}}
		objdir=${objdir:-${srcdir}}

		if [ ! -f "${srcdir}/.built" ];
		then
			cd "${srcdir}"
			ln -s "${S}"/Module.symvers Module.symvers
			einfo "Preparing ${modulename} module"
			if [[ -n ${ECONF_PARAMS} ]] && [ ! -f "${srcdir}/.configured" ];
			then
				econf ${ECONF_PARAMS} || \
					die "Unable to run econf ${ECONF_PARAMS}"
			fi

			# This looks messy, but it is needed to handle multiple variables
			# being passed in the BUILD_* stuff where the variables also have
			# spaces that must be preserved. If don't do this, then the stuff
			# inside the variables gets used as targets for Make, which then
			# fails.
			eval "emake HOSTCC=\"$(tc-getBUILD_CC)\" \
				CROSS_COMPILE=${CHOST_default}- \
				LDFLAGS=\"$(get_abi_LDFLAGS)\" \
				${BUILD_FIXES} \
				${BUILD_PARAMS} \
				${BUILD_TARGETS} " \
				|| die "Unable to emake HOSTCC="$(tc-getBUILD_CC)" CROSS_COMPILE=${CHOST_default}- LDFLAGS="$(get_abi_LDFLAGS)" ${BUILD_FIXES} ${BUILD_PARAMS} ${BUILD_TARGETS}"
			cd "${OLDPWD}"
			touch "${srcdir}"/.built
		fi
	done

	set_arch_to_portage
	ABI="${myABI}"
}

# @FUNCTION: linux-mod_src_install
# @DESCRIPTION:
# It install the modules specified in MODULES_NAME. The modules should be inside the ${objdir}
# directory and they are installed inside /lib/modules/${KV_FULL}/${libdir}.
#
# The modprobe.d/modules.d configuration file is automatically generated if the
# MODULESD_<modulename>_* variables are defined. The only way to stop this process is by
# setting MODULESD_<modulename>_ENABLED=no. At the end the documentation specified via
# MODULESD_<modulename>_DOCS is also installed.
#
# Look at the description of these variables for more details.
linux-mod_src_install() {
	debug-print-function ${FUNCNAME} $*

	local modulename libdir srcdir objdir modspec param

	strip_modulenames;
	# Install each module described in MODULE_NAMES into the kernel tree.
	for modspec in ${MODULE_NAMES}; do
		unset libdir srcdir objdir
		# Expand "name(libdir:srcdir:objdir)" into the matching variables.
		for param in $(find_module_params ${modspec}); do
			eval ${param/:*}=${param/*:/}
		done
		libdir=${libdir:-misc}
		srcdir=${srcdir:-${S}}
		objdir=${objdir:-${srcdir}}

		einfo "Installing ${modulename} module"
		cd "${objdir}" || die "${objdir} does not exist"
		insinto /lib/modules/${KV_FULL}/${libdir}
		doins ${modulename}.${KV_OBJ} || die "doins ${modulename}.${KV_OBJ} failed"
		cd "${OLDPWD}"

		# Emit the modprobe.d/modules.d snippet for this module, if requested.
		generate_modulesd "${objdir}/${modulename}"
	done
}

# @FUNCTION: linux-mod_pkg_preinst
# @DESCRIPTION:
# It checks what to do after having merged the package.
linux-mod_pkg_preinst() {
	debug-print-function ${FUNCNAME} $*

	# Decide which post-install helpers are needed, based on what the
	# image directory actually contains.
	if [ -d "${D}lib/modules" ]; then
		UPDATE_DEPMOD=true
		UPDATE_MODULEDB=true
	else
		UPDATE_DEPMOD=false
		UPDATE_MODULEDB=false
	fi
	if [ -d "${D}etc/modules.d" ]; then
		UPDATE_MODULES=true
	else
		UPDATE_MODULES=false
	fi
}

# @FUNCTION: linux-mod_pkg_postinst
# @DESCRIPTION:
# It executes /sbin/depmod and adds the package to the /var/lib/module-rebuild/moduledb
# database (if ${D}/lib/modules is created) and it runs /sbin/update-modules
# (if ${D}/etc/modules.d is created).
linux-mod_pkg_postinst() {
	debug-print-function ${FUNCNAME} $*

	${UPDATE_DEPMOD} && update_depmod
	${UPDATE_MODULES} && update_modules
	${UPDATE_MODULEDB} && update_moduledb
}

# @FUNCTION: linux-mod_pkg_postrm
# @DESCRIPTION:
# It removes the package from the /var/lib/module-rebuild/moduledb database but it doesn't
# call /sbin/depmod and /sbin/update-modules because the modules are still installed.
linux-mod_pkg_postrm() {
	debug-print-function ${FUNCNAME} $*
	remove_moduledb;
}
diff --git a/eclass/mozconfig-2.eclass b/eclass/mozconfig-2.eclass
new file mode 100644
index 0000000..8a8f26f
--- /dev/null
+++ b/eclass/mozconfig-2.eclass
@@ -0,0 +1,67 @@
# Copyright 1999-2007 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# $Header: /var/cvsroot/gentoo-x86/eclass/mozconfig-2.eclass,v 1.22 2010/07/23 19:53:30 ssuominen Exp $
#
# mozconfig.eclass: the new mozilla.eclass

inherit multilib flag-o-matic mozcoreconf

IUSE="debug gnome ipv6 xinerama"

RDEPEND="x11-libs/libXrender[lib32?]
	x11-libs/libXt[lib32?]
	x11-libs/libXmu[lib32?]
	virtual/jpeg[lib32?]
	>=media-libs/libpng-1.2.1[lib32?]
	dev-libs/expat[lib32?]
	app-arch/zip
	app-arch/unzip
	>=x11-libs/gtk+-2.8.6[lib32?]
	>=dev-libs/glib-2.8.2[lib32?]
	>=x11-libs/pango-1.10.1[lib32?]
	>=dev-libs/libIDL-0.8.0[lib32?]
	gnome? ( >=gnome-base/gnome-vfs-2.3.5[lib32?]
		>=gnome-base/libgnomeui-2.2.0[lib32?] )
	!=x11-libs/cairo-1.0.0[lib32?]"
	#According to bugs #18573, #204520, and couple of others in Mozilla's
	#bugzilla. libmng and mng support has been removed in 2003.


DEPEND="${RDEPEND}
	xinerama? ( x11-proto/xineramaproto )"

# mozconfig_config: append the package's standard configure annotations
# (ipv6/xinerama/debug/gnome handling) to the generated .mozconfig.
mozconfig_config() {
	mozconfig_use_enable ipv6
	mozconfig_use_enable xinerama

	# We use --enable-pango to do truetype fonts, and currently pango
	# is required for it to build
	mozconfig_annotate gentoo --disable-freetype2

	if use debug; then
		mozconfig_annotate +debug \
			--enable-debug \
			--enable-tests \
			--disable-reorder \
			--enable-debugger-info-modules=ALL_MODULES
	else
		# NOTE(fix): removed the dangling "\" after --enable-reorder; it
		# continued the command onto the following (blank) line and would
		# silently swallow the next line if the blank line were removed.
		mozconfig_annotate -debug \
			--disable-debug \
			--disable-tests \
			--enable-reorder

		# Currently --enable-elf-dynstr-gc only works for x86 and ppc,
		# thanks to Jason Wever for the fix.
		# -- This breaks now on ppc, no idea why
#		if use x86 || use ppc && [[ ${enable_optimize} != -O0 ]]; then
		if use x86 && [[ ${enable_optimize} != -O0 ]]; then
			mozconfig_annotate "${ARCH} optimized build" --enable-elf-dynstr-gc
		fi
	fi

	if ! use gnome; then
		mozconfig_annotate -gnome --disable-gnomevfs
		mozconfig_annotate -gnome --disable-gnomeui
	fi
}
diff --git a/eclass/mozconfig-3.eclass b/eclass/mozconfig-3.eclass
new file mode 100644
index 0000000..33a9191
--- /dev/null
+++ b/eclass/mozconfig-3.eclass
@@ -0,0 +1,64 @@
# Copyright 1999-2008 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# $Header: /var/cvsroot/gentoo-x86/eclass/mozconfig-3.eclass,v 1.10 2010/07/23 19:53:30 ssuominen Exp $
#
# mozconfig.eclass: the new mozilla.eclass

inherit multilib flag-o-matic mozcoreconf-2

IUSE="gnome dbus startup-notification"

RDEPEND="x11-libs/libXrender[lib32?]
	x11-libs/libXt[lib32?]
	x11-libs/libXmu[lib32?]
	virtual/jpeg[lib32?]
	dev-libs/expat[lib32?]
	app-arch/zip
	app-arch/unzip
	>=x11-libs/gtk+-2.8.6[lib32?]
	>=dev-libs/glib-2.8.2[lib32?]
	>=x11-libs/pango-1.10.1[lib32?]
	>=dev-libs/libIDL-0.8.0[lib32?]
	gnome? ( >=gnome-base/gnome-vfs-2.16.3[lib32?]
		>=gnome-base/libgnomeui-2.16.1[lib32?]
		>=gnome-base/gconf-2.16.0[lib32?]
		>=gnome-base/libgnome-2.16.0[lib32?] )
	dbus? ( >=dev-libs/dbus-glib-0.72[lib32?] )
	startup-notification? ( >=x11-libs/startup-notification-0.8[lib32?] )
	!=x11-libs/cairo-1.6.0[lib32?]"

DEPEND="${RDEPEND}"

# mozconfig_config: append the package's standard configure annotations
# (toolkit selection, dbus/startup-notification/gnome handling) to .mozconfig.
# NOTE(review): MN/XUL/TB are presumably set by the consuming ebuild to
# "true"/"false" — they are executed as commands here; confirm against callers.
mozconfig_config() {
	if ${MN} || ${XUL} || ${TB}; then
		mozconfig_annotate thebes --enable-default-toolkit=cairo-gtk2
	else
		mozconfig_annotate -thebes --enable-default-toolkit=gtk2
	fi

	mozconfig_use_enable dbus
	mozconfig_use_enable startup-notification

#	if use debug; then
#		mozconfig_annotate +debug \
#			--enable-debug \
#			--enable-tests \
#			--enable-debugger-info-modules=ALL_MODULES
#	else
		mozconfig_annotate -debug \
			--disable-debug \
			--disable-tests

		# Currently --enable-elf-dynstr-gc only works for x86 and ppc,
		# thanks to Jason Wever for the fix.
		# -- This breaks now on ppc, no idea why
#		if use x86 || use ppc && [[ ${enable_optimize} != -O0 ]]; then
		if use x86 && [[ ${enable_optimize} != -O0 ]]; then
			mozconfig_annotate "${ARCH} optimized build" --enable-elf-dynstr-gc
		fi
#	fi

	mozconfig_use_enable gnome gnomevfs
	mozconfig_use_enable gnome gnomeui
}
diff --git a/eclass/multilib-native.eclass b/eclass/multilib-native.eclass
new file mode 100644
index 0000000..2a3f3ff
--- /dev/null
+++ b/eclass/multilib-native.eclass
@@ -0,0 +1,717 @@
# Copyright 1999-2008 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# $Header: $
#
# @ECLASS: multilib-native.eclass
# @MAINTAINER:
# Steven Newbury
# @BLURB: Provide infrastructure for native multilib ebuilds

IUSE="${IUSE} lib32"

DEPEND="${DEPEND} sys-apps/abi-wrapper"
RDEPEND="${RDEPEND} sys-apps/abi-wrapper"

if use lib32; then
	EMULTILIB_PKG="true"
fi

inherit base multilib

case "${EAPI:-0}" in
	2|3)
		EXPORT_FUNCTIONS pkg_setup src_unpack src_prepare src_configure src_compile src_install pkg_preinst pkg_postinst pkg_postrm
		;;
	*)
		EXPORT_FUNCTIONS pkg_setup src_unpack src_compile src_install pkg_preinst pkg_postinst pkg_postrm
		;;
esac

# -----------------------------------------------------------------------------

# @VARIABLE: EMULTILIB_SAVE_VARS
# @DESCRIPTION: Environment variables to save per ABI (see the save/restore helpers)
# EMULTILIB_SAVE_VARS="${EMULTILIB_SAVE_VARS}
#	AS CC CXX FC LD ASFLAGS CFLAGS CXXFLAGS FCFLAGS FFLAGS LDFLAGS
#	CHOST CBUILD CDEFINE LIBDIR S CCACHE_DIR myconf PYTHON PERLBIN
#	QMAKE QMAKESPEC QTBINDIR QTBASEDIR QTLIBDIR QTPCDIR
#	QTPLUGINDIR CMAKE_BUILD_DIR mycmakeargs KDE_S POPPLER_MODULE_S
#	ECONF_SOURCE MY_LIBDIR MOZLIBDIR SDKDIR G2CONF PKG_CONFIG_PATH
#	DESTTREE SRC_PREP USE_64"
EMULTILIB_SAVE_VARS="${EMULTILIB_SAVE_VARS}
	AS CC CXX FC LD ASFLAGS CFLAGS CXXFLAGS FCFLAGS FFLAGS LDFLAGS
	CHOST CBUILD CDEFINE LIBDIR S CCACHE_DIR myconf PYTHON PERLBIN
	QMAKE QMAKESPEC QTBINDIR QTBASEDIR QTLIBDIR QTPCDIR
	QTPLUGINDIR CMAKE_BUILD_DIR mycmakeargs KDE_S POPPLER_MODULE_S
	ECONF_SOURCE MY_LIBDIR MOZLIBDIR SDKDIR G2CONF PKG_CONFIG_PATH
	DESTTREE SRC_PREP USE_64 osname mythreading myarch PRIV_LIB
	SITE_LIB SITE_ARCH VENDOR_LIB VENDOR_ARCH ARCH_LIB"

# @VARIABLE: EMULTILIB_SOURCE_DIRNAME
# @DESCRIPTION: Holds the name of the source directory
# EMULTILIB_SOURCE_DIRNAME=""
EMULTILIB_SOURCE_DIRNAME=""

# @VARIABLE: EMULTILIB_SOURCE
# @DESCRIPTION:
# PATH to the top-level source directory. This may be used in multilib-ised
# ebuilds choosing to make use of external build directories for installing
# files from the top of the source tree although for builds with external
# build directories it's sometimes more appropriate to use ${ECONF_SOURCE}.
# EMULTILIB_SOURCE=""
EMULTILIB_SOURCE=""

# @VARIABLE: EMULTILIB_RELATIVE_BUILD_DIR
# @DESCRIPTION: Path of ${S} relative to the top of the unpacked source tree
# EMULTILIB_RELATIVE_BUILD_DIR=""
EMULTILIB_RELATIVE_BUILD_DIR=""

# @VARIABLE: CMAKE_BUILD_DIR
# @DESCRIPTION:
# Despite the name, this is used for all build systems within this eclass.
# Usually this is the same as ${S}, except when using an external build
# directory. (This is per ABI and so is saved/restored for each phase.)
# CMAKE_BUILD_DIR=""
CMAKE_BUILD_DIR=""

# @VARIABLE: EMULTILIB_INHERITED
# @DESCRIPTION:
# Holds a list of inherited eclasses
# this var is only used in multilib-native_check_inherited_funcs
EMULTILIB_INHERITED=""

# -----------------------------------------------------------------------------

# @FUNCTION: multilib-native_pkg_setup
# @USAGE:
# @DESCRIPTION: This is a multilib wrapper for the pkg_setup phase
multilib-native_pkg_setup() {
	multilib-native_src_generic pkg_setup
}

# @FUNCTION: multilib-native_src_unpack
# @USAGE:
# @DESCRIPTION: This is a multilib wrapper for the src_unpack phase
multilib-native_src_unpack() {
	multilib-native_src_generic src_unpack
}

# @FUNCTION: multilib-native_src_prepare
# @USAGE:
# @DESCRIPTION: This is a multilib wrapper for the src_prepare phase
multilib-native_src_prepare() {
	multilib-native_src_generic src_prepare
}

# @FUNCTION: multilib-native_src_configure
# @USAGE:
# @DESCRIPTION: This is a multilib wrapper for the src_configure phase
multilib-native_src_configure() {
	multilib-native_src_generic src_configure
}

# @FUNCTION: multilib-native_src_compile
# @USAGE:
# @DESCRIPTION: This is a multilib wrapper for the src_compile phase
multilib-native_src_compile() {
	multilib-native_src_generic src_compile
}

# @FUNCTION: multilib-native_src_install
# @USAGE:
# @DESCRIPTION: This is a multilib wrapper for the src_install phase
multilib-native_src_install() {
	multilib-native_src_generic src_install
}

# @FUNCTION: multilib-native_pkg_preinst
# @USAGE:
# @DESCRIPTION: This is a multilib wrapper for the pkg_preinst phase
multilib-native_pkg_preinst() {
	multilib-native_src_generic pkg_preinst
}

# @FUNCTION: multilib-native_pkg_postinst
# @USAGE:
# @DESCRIPTION: This is a multilib wrapper for the pkg_postinst phase
multilib-native_pkg_postinst() {
	multilib-native_src_generic pkg_postinst
}

# @FUNCTION: multilib-native_pkg_postrm
# @USAGE:
#
@DESCRIPTION: This is a multilib wrapper for the pkg_postrm phase +multilib-native_pkg_postrm() { + multilib-native_src_generic pkg_postrm +} + +# @FUNCTION: multilib_debug +# @USAGE: +# @DESCRIPTION: print debug output if MULTILIB_DEBUG is set +multilib_debug() { + [[ -n ${MULTILIB_DEBUG} ]] && einfo "MULTILIB_DEBUG: ${1}=\"${2}\"" +} + +# ----------------------------------------------------------------------------- + +# Internal function +# @FUNCTION: multilib-native_src_generic +# @USAGE: +# @DESCRIPTION: Run each phase for each "install ABI" +multilib-native_src_generic() { +# Recurse this function for each ABI from get_install_abis() + if [[ -n ${EMULTILIB_PKG} ]] && [[ -z ${OABI} ]] ; then + local abilist="" + if has_multilib_profile ; then + abilist=$(get_install_abis) + einfo "${1/src_/} multilib ${PN} for ABIs: ${abilist}" + elif is_crosscompile || tc-is-cross-compiler ; then + abilist=${DEFAULT_ABI} + fi + if [[ -n ${abilist} ]] ; then + OABI=${ABI} + for ABI in ${abilist} ; do + export ABI + multilib-native_src_generic ${1} + done + ABI=${OABI} + unset OABI + return 0 + fi + fi + +# If this is the first time through, initialise the source path variables early +# and unconditionally, whether building for multilib or not. (This allows +# multilib-native ebuilds to always make use of them.) Then save the initial +# environment. +# +# Sometimes, packages assume a directory structure ABOVE "S". ("S" is set to a +# subdirectory of the tree they unpack into ${WORKDIR}.) We need to deal with +# this by finding the top-level of the source tree and keeping track of ${S} +# relative to it. 
+ + if [[ -z ${EMULTILIB_INITIALISED[$(multilib-native_abi_to_index_key "INIT")]} ]]; then + [[ -n ${MULTILIB_DEBUG} ]] && \ + einfo "MULTILIB_DEBUG: Determining EMULTILIB_SOURCE from S and WORKDIR" + EMULTILIB_RELATIVE_BUILD_DIR="${S#*${WORKDIR}\/}" + [[ -n ${MULTILIB_DEBUG} ]] && \ + einfo "MULTILIB_DEBUG: EMULTILIB_RELATIVE_BUILD_DIR=\"${EMULTILIB_RELATIVE_BUILD_DIR}\"" + EMULTILIB_SOURCE_DIRNAME="${EMULTILIB_RELATIVE_BUILD_DIR%%/*}" + [[ -n ${MULTILIB_DEBUG} ]] && \ + einfo "MULTILIB_DEBUG: EMULTILIB_SOURCE_DIRNAME=\"${EMULTILIB_SOURCE_DIRNAME}\"" + EMULTILIB_SOURCE="${WORKDIR}/${EMULTILIB_SOURCE_DIRNAME}" + CMAKE_BUILD_DIR="${S}" + [[ -n ${MULTILIB_DEBUG} ]] && \ + einfo "MULTILIB_DEBUG: EMULTILIB_SOURCE=\"${EMULTILIB_SOURCE}\"" + multilib-native_save_abi_env "INIT" + EMULTILIB_INITIALISED[$(multilib-native_abi_to_index_key "INIT")]=1 + fi + + if [[ -n ${EMULTILIB_PKG} ]] && has_multilib_profile; then + multilib-native_src_generic_sub ${1} + +# Save the environment for this ABI + multilib-native_save_abi_env "${ABI}" + +# If this is the default ABI and we have a build tree, update the INIT +# environment + [[ "${ABI}" == "${DEFAULT_ABI}" ]] && \ + [[ -d "${WORKDIR}/${PN}_build_${ABI}" ]] && \ + multilib-native_save_abi_env "INIT" + +# This assures the environment is correctly configured for non-multilib phases +# such as a src_unpack override in ebuilds. + multilib-native_restore_abi_env "INIT" + else + multilib-native_${1}_internal + fi +} + +# Internal function +# @FUNCTION: multilib-native_src_generic_sub +# @USAGE: +# @DESCRIPTION: This function gets used for each ABI pass of each phase +multilib-native_src_generic_sub() { +# We support two kinds of build: By default we copy/move the source dir for +# each ABI. Where supported with the underlying package, we can just create an +# external build dir. 
This requires a modified ebuild which makes use of the +# EMULTILIB_SOURCE variable (which points the the top of the original +# source dir) to install doc files etc. This latter behaviour is enabled with +# MULTILIB_EXT_SOURCE_BUILD. For CMake based packages default is reversed and +# the CMAKE_IN_SOURCE_BUILD environment variable is used to specify the former +# behaviour. +# + + if [[ -z ${EMULTILIB_INITIALISED[$(multilib-native_abi_to_index_key ${ABI})]} ]]; then + multilib-native_restore_abi_env "INIT" + multilib-native_setup_abi_env "${ABI}" + else + multilib-native_restore_abi_env "${ABI}" + fi + +# If this is the unpack or prepare phase we only need to run for the +# DEFAULT_ABI when we are building out of the source tree since it is shared +# between each ABI. +# +# After the unpack phase, some eclasses change into the unpacked source tree +# (gnome2.eclass for example), we need to change back to the WORKDIR otherwise +# the next ABI tree will get unpacked into a subdir of previous tree. + + + case ${1/*_} in + setup) + ;; + unpack) + [[ -d "${WORKDIR}" ]] && cd "${WORKDIR}" + if multilib-native_is_EBD && \ + [[ ! "${ABI}" == "${DEFAULT_ABI}" ]]; then + einfo "Skipping ${1} for ${ABI}" + return + fi + ;; + prepare) + if multilib-native_is_EBD; then + if [[ ! "${ABI}" == "${DEFAULT_ABI}" ]]; then + einfo "Skipping ${1} for ${ABI}" + return + fi + else + [[ ! -d "${WORKDIR}/${PN}_build_${ABI}" ]] && multilib-native_setup_build_directory + fi + if [[ -d "${S}" ]]; then + einfo "Working in ${S}" + cd "${S}" + else + ewarn "Not changing to non-existant source directory" + fi + ;; + configure|compile|install) + [[ ! 
-d "${WORKDIR}/${PN}_build_${ABI}" ]] && multilib-native_setup_build_directory + [[ -d "${S}" ]] && cd "${S}" + ;; + *) + [[ -d "${S}" ]] && cd "${S}" + ;; + esac + + + # FIXME: There is a failure case when there is no source directory + # at ${EMULTILIB_SOURCE}, creating a directory there is the *wrong* + # thing to do, certianly not unconditionally! + # mkdir -p "${EMULTILIB_SOURCE}" + +# Call the "real" phase function + multilib-native_${1}_internal + +# If we've just unpacked the source, move it into place. + if [[ ! "${1/unpack}" == "${1}" ]] && \ + ( [[ -d "${EMULTILIB_SOURCE}" ]] && \ + [[ ! -d "${WORKDIR}/${PN}_build_${ABI}" ]] ) && ! (multilib-native_is_EBD); then + einfo "Moving source tree from ${EMULTILIB_SOURCE} to ${WORKDIR}/${PN}_build_${ABI}" + mv "${EMULTILIB_SOURCE}" "${WORKDIR}/${PN}_build_${ABI}" + S="${CMAKE_BUILD_DIR}" + [[ -n ${KDE_S} ]] && KDE_S="${S}" + [[ -n ${POPPLER_MODULE_S} ]] && \ + POPPLER_MODULE_S=${S}/${POPPLER_MODULE} + fi +} + +multilib-native_setup_build_directory() { + if multilib-native_is_EBD; then + einfo "Preparing external build directory for ABI: ${ABI} ..." + einfo "Creating build directory: ${WORKDIR}/${PN}_build_${ABI}" + mkdir -p "${CMAKE_BUILD_DIR}" + ECONF_SOURCE="${S}" + else + if [[ -d ${EMULTILIB_SOURCE} ]]; then + if ! is_final_abi; then + einfo "Copying source tree from ${EMULTILIB_SOURCE} to ${WORKDIR}/${PN}_build_${ABI}" + cp -al "${EMULTILIB_SOURCE}" "${WORKDIR}/${PN}_build_${ABI}" + else + einfo "Moving source tree from ${EMULTILIB_SOURCE} to ${WORKDIR}/${PN}_build_${ABI}" + mv "${EMULTILIB_SOURCE}" "${WORKDIR}/${PN}_build_${ABI}" + fi + fi + fi + if ([[ -n "${CMAKE_BUILD_TYPE}" ]] && \ + [[ -n "${CMAKE_IN_SOURCE_BUILD}" ]]) || \ + [[ -z "${CMAKE_BUILD_TYPE}" ]]; then + S="${CMAKE_BUILD_DIR}" + fi + +} + +# Internal function +# @FUNCTION: multilib-native_is_EBD +# @USAGE: +# @DESCRIPTION: Returns true if we're building with an "External Build Directory" +multilib-native_is_EBD() { +! 
( [[ -n "${CMAKE_IN_SOURCE_BUILD}" ]] || \ + ( [[ -z "${CMAKE_BUILD_TYPE}" ]] && \ + [[ -z "${MULTILIB_EXT_SOURCE_BUILD}" ]] ) ) +} + +# Internal function +# @FUNCTION: multilib-native_setup_abi_env +# @USAGE: +# @DESCRIPTION: Setup initial environment for ABI, flags, workarounds etc. +multilib-native_setup_abi_env() { + local pyver="" libsuffix="" + [[ -z $(multilib-native_abi_to_index_key ${1}) ]] && \ + die "Unknown ABI (${1})" + +# Set the CHOST native first so that we pick up the native #202811. + export CHOST=$(get_abi_CHOST ${DEFAULT_ABI}) + export AS="$(tc-getAS)" + export CC="$(tc-getCC)" + export CXX="$(tc-getCXX)" + export FC="$(tc-getFC)" + export LD="$(tc-getLD) $(get_abi_LDFLAGS)" + export ASFLAGS="${ASFLAGS} $(get_abi_ASFLAGS)" + export CFLAGS="${CFLAGS} $(get_abi_CFLAGS)" + export CXXFLAGS="${CXXFLAGS} $(get_abi_CFLAGS)" + export FCFLAGS="${FCFLAGS} ${CFLAGS}" + export FFLAGS="${FFLAGS} ${CFLAGS}" + export CHOST=$(get_abi_CHOST $1) + export CBUILD=$(get_abi_CHOST $1) + export CDEFINE="${CDEFINE} $(get_abi_CDEFINE $1)" + export LDFLAGS="${LDFLAGS} -L/$(get_abi_LIBDIR $1) -L/usr/$(get_abi_LIBDIR $1)" + + if [[ -z PKG_CONFIG_PATH ]]; then + export PKG_CONFIG_PATH="/usr/$(get_libdir)/pkgconfig" + else + PKG_CONFIG_PATH="${PKG_CONFIG_PATH/lib*\//$(get_libdir)/}:/usr/$(get_libdir)/pkgconfig" + fi + +# if ! 
[[ "${ABI}" == "${DEFAULT_ABI}" ]]; then +# built_with_use dev-lang/perl lib32 && [[ "$(readlink /usr/bin/perl)" == "/usr/bin/abi-wrapper" ]] || eerror multilib-native.eclass: please rebuild dev-lang/perl to avoid problems +# pyver=$(python --version 2>&1) +# pyver=${pyver/Python /python} +# pyver=${pyver%.*} +# built_with_use dev-lang/python lib32 && [[ "$(readlink /usr/bin/${pyver})" == "/usr/bin/abi-wrapper" ]] || eerror multilib-native.eclass: please rebuild dev-lang/python to avoid problems +# fi + +# ccache is ABI dependent + if [[ -z ${CCACHE_DIR} ]] ; then + CCACHE_DIR="/var/tmp/ccache-${1}" + else + CCACHE_DIR="${CCACHE_DIR}-${1}" + fi + + CMAKE_BUILD_DIR="${WORKDIR}/${PN}_build_${ABI}/${EMULTILIB_RELATIVE_BUILD_DIR/${EMULTILIB_SOURCE_DIRNAME}}" + + # Strip any trailing slash (fixes build failure with python.eclass) + CMAKE_BUILD_DIR="${CMAKE_BUILD_DIR%/}" + + EMULTILIB_INITIALISED[$(multilib-native_abi_to_index_key ${1})]=1 +} + +# Internal function +# @FUNCTION: multilib-native_abi_to_index_key +# @USAGE: +# @RETURN: +# @DESCRIPTION: Return an array index key for a given ABI +multilib-native_abi_to_index_key() { +# Until we can count on bash version > 4, we can't use associative arrays. 
	# Linear scan over the (lazily built) ABI index array; echoes the
	# position of ${1}. Slot 0 is a dummy "INIT" entry.
	local index=0 element=""
	if [[ -z "${EMULTILIB_ARRAY_INDEX}" ]]; then
		local abilist=""
		abilist=$(get_install_abis)
		EMULTILIB_ARRAY_INDEX=(INIT ${abilist})
	fi
	for element in ${EMULTILIB_ARRAY_INDEX[@]}; do
		[[ "${element}" == "${1}" ]] && echo "${index}"
		let index++
	done
}

# Internal function
# @FUNCTION: multilib-native_save_abi_env
# @USAGE: <ABI>
# @DESCRIPTION: Save environment for ABI
# Snapshots every variable named in EMULTILIB_SAVE_VARS into the per-ABI
# slot of the corresponding EMULTILIB_<var> array (skips unset variables).
multilib-native_save_abi_env() {
	[[ -n ${MULTILIB_DEBUG} ]] && \
		einfo "MULTILIB_DEBUG: Saving Environment:" "${1}"
	local _var _array
	for _var in ${EMULTILIB_SAVE_VARS}; do
		_array="EMULTILIB_${_var}"
		# Only save variables that are actually set in the current env.
		declare -p ${_var} &>/dev/null || continue
		multilib_debug ${_array}[$(multilib-native_abi_to_index_key ${1})] "${!_var}"
		eval "${_array}[$(multilib-native_abi_to_index_key ${1})]"=\"${!_var}\"
	done
}

# Internal function
# @FUNCTION: multilib-native_restore_abi_env
# @USAGE: <ABI>
# @DESCRIPTION: Restore environment for ABI
# Inverse of multilib-native_save_abi_env: re-exports each saved variable
# from its per-ABI array slot; variables with no saved value are unset.
multilib-native_restore_abi_env() {
	[[ -n ${MULTILIB_DEBUG} ]] && \
		einfo "MULTILIB_DEBUG: Restoring Environment:" "${1}"
	local _var _array
	for _var in ${EMULTILIB_SAVE_VARS}; do
		_array="EMULTILIB_${_var}[$(multilib-native_abi_to_index_key ${1})]"
		if ! (declare -p EMULTILIB_${_var} &>/dev/null) || \
			[[ -z ${!_array} ]]; then
			# Nothing saved for this ABI: drop the variable if it exists.
			if (declare -p ${_var} &>/dev/null); then
				[[ -n ${MULTILIB_DEBUG} ]] && \
					einfo "MULTILIB_DEBUG: unsetting ${_var}"
				unset ${_var}
			fi
			continue
		fi
		multilib_debug "${_var}" "${!_array}"
		export ${_var}="${!_array}"
	done
}

# Internal function
# @FUNCTION multilib-native_check_inherited_funcs
# @USAGE:
# @DESCRIPTION: Checks all inherited eclasses for requested phase function
multilib-native_check_inherited_funcs() {
# Check all eclasses for given function, in order of inheritance.
# If none provides it, the var stays empty. If more have it, the last one wins.
# Ignore the ones we inherit ourselves, base doesn't matter, as we default on
# it.
	# Resolve which "<eclass>_<phase>" function should run for phase ${1},
	# then invoke it. Falls back to base.eclass for src_* phases.
	local declared_func=""
	if [[ -f "${T}"/eclass-debug.log ]]; then
		# Portage writes inherited eclass markers here; extract eclass names.
		EMULTILIB_INHERITED="$(grep ${1} "${T}"/eclass-debug.log | cut -d ' ' -f 4 | cut -d '_' -f 1)"
	else
		if [[ "$1" != pkg_postrm ]]; then
			ewarn "You are using a package manager that does not provide "${T}"/eclass-debug.log."
			ewarn "Join #gentoo-multilib-overlay on freenode to help finding another way for you."
			ewarn "Falling back to old behaviour ..."
		fi
		EMULTILIB_INHERITED="${INHERITED}"
	fi

	# base is the default anyway and this eclass must not dispatch to itself.
	EMULTILIB_INHERITED="${EMULTILIB_INHERITED//base/}"
	EMULTILIB_INHERITED="${EMULTILIB_INHERITED//multilib-native/}"

	multilib_debug EMULTILIB_INHERITED ${EMULTILIB_INHERITED}

	# Last inherited eclass that defines <eclass>_<phase> wins.
	for func in ${EMULTILIB_INHERITED}; do
		if [[ -n $(declare -f ${func}_${1}) ]]; then
			multilib_debug declared_func "${declared_func}"
			declared_func="${func}_${1}"
		fi
	done

	# distutils.eclass only exports src_unpack in EAPI 0/1.
	if [[ "$declared_func" == "distutils_src_unpack" ]]; then
		if ! has "${EAPI:-0}" 0 1; then
			unset declared_func
		fi
	fi

# Now if $declared_func is still empty, none of the inherited eclasses provides
# it, so default on base.eclass. Do nothing for "phase != src_*".
	if [[ -z "${declared_func}" ]]; then
		if [[ "${1/_*}" != "src" ]]; then
			declared_func="return"
		else
			declared_func="base_${1}"
		fi
	fi

	if [[ -z ${SRC_URI} && ( "${declared_func}" == "base_src_prepare" || "${declared_func}" == "base_src_install" ) ]]; then
		# those functions do not work if we do not have sources
		declared_func="return"
	fi

	einfo "Using ${declared_func} for ABI ${ABI} ..."
	${declared_func}
}

# @FUNCTION: multilib-native_src_prepare_internal
# @USAGE:
# @DESCRIPTION: Override this function if you want a custom src_prepare.
multilib-native_src_prepare_internal() {
	multilib-native_check_inherited_funcs src_prepare
}

# @FUNCTION: multilib-native_src_configure_internal
# @USAGE:
# @DESCRIPTION: Override this function if you want a custom src_configure.
multilib-native_src_configure_internal() { multilib-native_check_inherited_funcs src_configure; }

# @FUNCTION: multilib-native_src_compile_internal
# @USAGE:
# @DESCRIPTION: Per-ABI src_compile hook; override for a custom src_compile.
multilib-native_src_compile_internal() { multilib-native_check_inherited_funcs src_compile; }

# @FUNCTION: multilib-native_src_install_internal
# @USAGE:
# @DESCRIPTION: Per-ABI src_install hook; override for a custom src_install.
multilib-native_src_install_internal() { multilib-native_check_inherited_funcs src_install; }

# @FUNCTION: multilib-native_pkg_setup_internal
# @USAGE:
# @DESCRIPTION: Per-ABI pkg_setup hook; override for a custom pkg_setup.
multilib-native_pkg_setup_internal() { multilib-native_check_inherited_funcs pkg_setup; }

# @FUNCTION: multilib-native_src_unpack_internal
# @USAGE:
# @DESCRIPTION: Per-ABI src_unpack hook; override for a custom src_unpack.
multilib-native_src_unpack_internal() { multilib-native_check_inherited_funcs src_unpack; }


# @FUNCTION: multilib-native_pkg_preinst_internal
# @USAGE:
# @DESCRIPTION: Per-ABI pkg_preinst hook; override for a custom pkg_preinst.
multilib-native_pkg_preinst_internal() { multilib-native_check_inherited_funcs pkg_preinst; }


# @FUNCTION: multilib-native_pkg_postinst_internal
# @USAGE:
# @DESCRIPTION: Per-ABI pkg_postinst hook; override for a custom pkg_postinst.
multilib-native_pkg_postinst_internal() { multilib-native_check_inherited_funcs pkg_postinst; }

# @FUNCTION: multilib-native_pkg_postrm_internal
# @USAGE:
# @DESCRIPTION: Per-ABI pkg_postrm hook; override for a custom pkg_postrm.
multilib-native_pkg_postrm_internal() { multilib-native_check_inherited_funcs pkg_postrm; }

# @FUNCTION: is_crosscompile
# @USAGE:
# @DESCRIPTION:
# True if we are cross-compiling.
# This is identical to the version in
# toolchain.eclass, but inheriting that eclass from here breaks many packages
# so just define locally.
is_crosscompile() {
	[[ ${CHOST} != ${CTARGET} ]]
}

# @FUNCTION: _check_build_dir
# @USAGE:
# @DESCRIPTION:
# This function overrides the function of the same name
# in cmake-utils.eclass. We handle the build dir ourselves.
# Determine using IN or OUT source build
_check_build_dir() {
	# @ECLASS-VARIABLE: CMAKE_USE_DIR
	# @DESCRIPTION:
	# Sets the directory where we are working with cmake.
	# For example when application uses autotools and only one
	# plugin needs to be done by cmake. By default it uses ${S}.
	: ${CMAKE_USE_DIR:=${S}}

# in/out source build
	echo ">>> Working in BUILD_DIR: \"$CMAKE_BUILD_DIR\""
}

# @FUNCTION: prep_ml_binaries
# @USAGE: <binary> [binary ...]
# @DESCRIPTION: Use wrapper to support non-default binaries.
# Renames each installed binary to <binary>-<ABI>; on the final ABI a
# symlink to /usr/bin/abi-wrapper is installed under the original name.
prep_ml_binaries() {
	# BUGFIX: 'binary' was an implicit global; declare it local.
	local binary
	if [[ -n ${EMULTILIB_PKG} ]] ; then
		for binary in "$@"; do
			# -e replaces the obsolete -a test; paths quoted against spaces.
			if [[ -e "${D}/${binary}" ]]; then
				# BUGFIX: the old die message claimed "not found" on a
				# failed mv; report the actual failure instead.
				mv "${D}/${binary}" "${D}/${binary}-${ABI}" || \
					die "Failed to move ${D}/${binary} to ${D}/${binary}-${ABI}!"
				einfo "mv ${D}/${binary} ${D}/${binary}-${ABI}"
				if is_final_abi; then
					# BUGFIX: message said "could link"; it means the link failed.
					ln -s /usr/bin/abi-wrapper "${D}/${binary}" || \
						die "Could not link abi-wrapper to ${D}/${binary}!"
					einfo "ln -s /usr/bin/abi-wrapper ${D}/${binary}"
				fi
			else
				ewarn "${D}/${binary} does not exist, please inform the people in #gentoo-multilib-overlay on freenode"
			fi
		done
	fi
}

# @FUNCTION: prep_ml_includes
# @DESCRIPTION:
# Some includes (include/asm, glibc, etc) are ABI dependent. In this case,
# We can install them in different locations for each ABI and create a common
# header which includes the right one based on CDEFINE_${ABI}. If your
# package installs ABI-specific headers, just add 'prep_ml_includes' to the
# end of your src_install(). It takes a list of directories that include
# files are installed in (default is /usr/include if none are passed).
#
# Example:
# src_install() {
# ...
# prep_ml_includes /usr/qt/3/include
# }
prep_ml_includes() {
	if [[ $(number_abis) -gt 1 ]] ; then
		local dir
		local dirs
		local base

		if [[ $# -eq 0 ]] ; then
			dirs=/usr/include
		else
			dirs="$@"
		fi

		# Stash each ABI's headers under ${T}/gentoo-multilib/<dir>/gentoo-multilib/<ABI>.
		for dir in ${dirs} ; do
			base=${T}/gentoo-multilib/${dir}/gentoo-multilib
			mkdir -p "${base}"
			[[ -d ${base}/${ABI} ]] && rm -rf "${base}/${ABI}"
			mv "${D}/${dir}" "${base}/${ABI}"
		done

		if is_final_abi; then
			base=${T}/gentoo-multilib

			local files_differ=
			local install_abis=$(get_install_abis)
			local alternate_abis=${install_abis% *}
			# Files identical across all ABIs are moved back to their
			# common location; differing files stay in per-ABI subdirs.
			for dir in ${dirs}; do
				pushd "${base}${dir}/gentoo-multilib/${ABI}"
				for i in $(find . -type f); do
					for diffabi in ${alternate_abis}; do
						diff -q "${i}" ../${diffabi}/"${i}" >/dev/null || files_differ=1
					done
					if [ -z "${files_differ}" ]; then
						[ -d "${D}${dir}/${i%/*}" ] || mkdir -p "${D}${dir}/${i%/*}"
						mv ${base}${dir}/gentoo-multilib/${ABI}/"${i}" "${D}${dir}/${i}"
						einfo rm -rf ${base}${dir}/gentoo-multilib/*/"${i}"
						rm -rf ${base}${dir}/gentoo-multilib/*/"${i}"
					fi
					files_differ=
				done
				popd
			done


			# Copy the remaining (ABI-specific) tree into ${D} preserving layout.
			pushd "${base}"
			find . | tar -c -T - -f - | tar -x --no-same-owner -f - -C "${D}"
			popd

			# This 'set' stuff is required by mips profiles to properly pass
			# CDEFINE's (which have spaces) to sub-functions
			set --
			for dir in ${dirs} ; do
				set -- "$@" "${dir}"
				local abi
				for abi in $(get_install_abis); do
					set -- "$@" "$(get_abi_CDEFINE ${abi}):${dir}/gentoo-multilib/${abi}"
				done
				create_ml_includes "$@"
			done
		fi
	fi
}
diff --git a/eclass/python.eclass b/eclass/python.eclass
new file mode 100644
index 0000000..0cb8d17
--- /dev/null
+++ b/eclass/python.eclass
@@ -0,0 +1,2609 @@
# Copyright 1999-2010 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# $Header: /var/cvsroot/gentoo-x86/eclass/python.eclass,v 1.102 2010/07/18 20:45:50 arfrever Exp $

# @ECLASS: python.eclass
# @MAINTAINER:
# Gentoo Python Project
# @BLURB: Eclass for Python packages
# @DESCRIPTION:
# The python eclass contains miscellaneous, useful functions for Python packages.

inherit multilib

if ! has "${EAPI:-0}" 0 1 2 3; then
	die "API of python.eclass in EAPI=\"${EAPI}\" not established"
fi

_CPYTHON2_SUPPORTED_ABIS=(2.4 2.5 2.6 2.7)
_CPYTHON3_SUPPORTED_ABIS=(3.0 3.1 3.2)
_JYTHON_SUPPORTED_ABIS=(2.5-jython)

# @ECLASS-VARIABLE: PYTHON_DEPEND
# @DESCRIPTION:
# Specification of dependency on dev-lang/python.
# Syntax:
# PYTHON_DEPEND: [[!]USE_flag? 
][ version_components_group]
# version_components_group:
# major_version: <2|3|*>
# minimal_version:
# maximal_version:

# Internal: parse PYTHON_DEPEND and append matching dev-lang/python atoms
# to DEPEND/RDEPEND. Dies on any syntax error.
_parse_PYTHON_DEPEND() {
	local major_version maximal_version minimal_version python_all="0" python_maximal_version python_minimal_version python_versions=() python2="0" python2_maximal_version python2_minimal_version python3="0" python3_maximal_version python3_minimal_version USE_flag= version_components_group version_components_group_regex version_components_groups

	version_components_group_regex="(2|3|\*)(:([[:digit:]]+\.[[:digit:]]+)?(:([[:digit:]]+\.[[:digit:]]+)?)?)?"
	version_components_groups="${PYTHON_DEPEND}"

	if [[ "${version_components_groups}" =~ ^((\!)?[[:alnum:]_-]+\?\ )?${version_components_group_regex}(\ ${version_components_group_regex})?$ ]]; then
		# Optional leading "[!]flag? " conditionalises the whole dependency.
		if [[ "${version_components_groups}" =~ ^(\!)?[[:alnum:]_-]+\? ]]; then
			USE_flag="${version_components_groups%\? *}"
			version_components_groups="${version_components_groups#* }"
		fi
		if [[ "${version_components_groups}" =~ ("*".*" "|" *"|^2.*\ (2|\*)|^3.*\ (3|\*)) ]]; then
			die "Invalid syntax of PYTHON_DEPEND: Incorrectly specified groups of versions"
		fi

		version_components_groups="${version_components_groups// /$'\n'}"
		while read version_components_group; do
			# Group layout: <major>[:<min>[:<max>]] — sliced by offset.
			major_version="${version_components_group:0:1}"
			minimal_version="${version_components_group:2}"
			minimal_version="${minimal_version%:*}"
			maximal_version="${version_components_group:$((3 + ${#minimal_version}))}"

			if [[ "${major_version}" =~ ^(2|3)$ ]]; then
				if [[ -n "${minimal_version}" && "${major_version}" != "${minimal_version:0:1}" ]]; then
					die "Invalid syntax of PYTHON_DEPEND: Minimal version '${minimal_version}' not in specified group of versions"
				fi
				if [[ -n "${maximal_version}" && "${major_version}" != "${maximal_version:0:1}" ]]; then
					die "Invalid syntax of PYTHON_DEPEND: Maximal version '${maximal_version}' not in specified group of versions"
				fi
			fi

			if [[ "${major_version}" == "2" ]]; then
				python2="1"
				python_versions=("${_CPYTHON2_SUPPORTED_ABIS[@]}")
				python2_minimal_version="${minimal_version}"
				python2_maximal_version="${maximal_version}"
			elif [[ "${major_version}" == "3" ]]; then
				python3="1"
				python_versions=("${_CPYTHON3_SUPPORTED_ABIS[@]}")
				python3_minimal_version="${minimal_version}"
				python3_maximal_version="${maximal_version}"
			else
				python_all="1"
				python_versions=("${_CPYTHON2_SUPPORTED_ABIS[@]}" "${_CPYTHON3_SUPPORTED_ABIS[@]}")
				python_minimal_version="${minimal_version}"
				python_maximal_version="${maximal_version}"
			fi

			if [[ -n "${minimal_version}" ]] && ! has "${minimal_version}" "${python_versions[@]}"; then
				die "Invalid syntax of PYTHON_DEPEND: Unrecognized minimal version '${minimal_version}'"
			fi
			if [[ -n "${maximal_version}" ]] && ! has "${maximal_version}" "${python_versions[@]}"; then
				die "Invalid syntax of PYTHON_DEPEND: Unrecognized maximal version '${maximal_version}'"
			fi

			if [[ -n "${minimal_version}" && -n "${maximal_version}" && "${minimal_version}" > "${maximal_version}" ]]; then
				die "Invalid syntax of PYTHON_DEPEND: Minimal version '${minimal_version}' greater than maximal version '${maximal_version}'"
			fi
		done <<< "${version_components_groups}"

		_PYTHON_ATOMS=()

		# Helper: walk python_versions from newest to oldest, collecting
		# atoms between python_maximal_version and python_minimal_version.
		_append_accepted_versions_range() {
			local accepted_version="0" i
			for ((i = "${#python_versions[@]}"; i >= 0; i--)); do
				if [[ "${python_versions[${i}]}" == "${python_maximal_version}" ]]; then
					accepted_version="1"
				fi
				if [[ "${accepted_version}" == "1" ]]; then
					_PYTHON_ATOMS+=("=dev-lang/python-${python_versions[${i}]}*")
				fi
				if [[ "${python_versions[${i}]}" == "${python_minimal_version}" ]]; then
					accepted_version="0"
				fi
			done
		}

		if [[ "${python_all}" == "1" ]]; then
			if [[ -z "${python_minimal_version}" && -z "${python_maximal_version}" ]]; then
				_PYTHON_ATOMS+=("dev-lang/python")
			else
				python_versions=("${_CPYTHON2_SUPPORTED_ABIS[@]}" "${_CPYTHON3_SUPPORTED_ABIS[@]}")
				python_minimal_version="${python_minimal_version:-${python_versions[0]}}"
				python_maximal_version="${python_maximal_version:-${python_versions[${#python_versions[@]}-1]}}"
				_append_accepted_versions_range
			fi
		else
			if [[ "${python3}" == "1" ]]; then
				if [[ -z "${python3_minimal_version}" && -z "${python3_maximal_version}" ]]; then
					_PYTHON_ATOMS+=("=dev-lang/python-3*")
				else
					python_versions=("${_CPYTHON3_SUPPORTED_ABIS[@]}")
					python_minimal_version="${python3_minimal_version:-${python_versions[0]}}"
					python_maximal_version="${python3_maximal_version:-${python_versions[${#python_versions[@]}-1]}}"
					_append_accepted_versions_range
				fi
			fi
			if [[ "${python2}" == "1" ]]; then
				if [[ -z "${python2_minimal_version}" && -z "${python2_maximal_version}" ]]; then
					_PYTHON_ATOMS+=("=dev-lang/python-2*")
				else
					python_versions=("${_CPYTHON2_SUPPORTED_ABIS[@]}")
					python_minimal_version="${python2_minimal_version:-${python_versions[0]}}"
					python_maximal_version="${python2_maximal_version:-${python_versions[${#python_versions[@]}-1]}}"
					_append_accepted_versions_range
				fi
			fi
		fi

		unset -f _append_accepted_versions_range

		if [[ "${#_PYTHON_ATOMS[@]}" -gt 1 ]]; then
			DEPEND+="${DEPEND:+ }${USE_flag}${USE_flag:+? ( }|| ( ${_PYTHON_ATOMS[@]} )${USE_flag:+ )}"
			RDEPEND+="${RDEPEND:+ }${USE_flag}${USE_flag:+? ( }|| ( ${_PYTHON_ATOMS[@]} )${USE_flag:+ )}"
		else
			DEPEND+="${DEPEND:+ }${USE_flag}${USE_flag:+? ( }${_PYTHON_ATOMS[@]}${USE_flag:+ )}"
			RDEPEND+="${RDEPEND:+ }${USE_flag}${USE_flag:+? ( }${_PYTHON_ATOMS[@]}${USE_flag:+ )}"
		fi
	else
		die "Invalid syntax of PYTHON_DEPEND"
	fi
}

DEPEND=">=app-admin/eselect-python-20091230"
RDEPEND="${DEPEND}"

if [[ -n "${PYTHON_DEPEND}" && -n "${NEED_PYTHON}" ]]; then
	die "PYTHON_DEPEND and NEED_PYTHON cannot be set simultaneously"
elif [[ -n "${PYTHON_DEPEND}" ]]; then
	_parse_PYTHON_DEPEND
elif [[ -n "${NEED_PYTHON}" ]]; then
	# Deprecated NEED_PYTHON path (EAPI 0-2 only).
	if ! has "${EAPI:-0}" 0 1 2; then
		eerror "Use PYTHON_DEPEND variable instead of NEED_PYTHON variable."
		die "NEED_PYTHON variable cannot be used in this EAPI"
	fi

	ewarn
	ewarn "\"${EBUILD}\":"
	ewarn "Deprecation Warning: NEED_PYTHON variable is deprecated and will be banned on 2010-10-01."
	ewarn "Use PYTHON_DEPEND variable instead of NEED_PYTHON variable."
	ewarn "The ebuild needs to be fixed. Please report a bug, if it has not been already reported."
	ewarn

	unset _BOLD _NORMAL

	_PYTHON_ATOMS=(">=dev-lang/python-${NEED_PYTHON}")
	DEPEND+="${DEPEND:+ }${_PYTHON_ATOMS[@]}"
	RDEPEND+="${RDEPEND:+ }${_PYTHON_ATOMS[@]}"
else
	_PYTHON_ATOMS=("dev-lang/python")
fi

# @ECLASS-VARIABLE: PYTHON_USE_WITH
# @DESCRIPTION:
# Set this to a space separated list of USE flags the Python slot in use must be built with.

# @ECLASS-VARIABLE: PYTHON_USE_WITH_OR
# @DESCRIPTION:
# Set this to a space separated list of USE flags of which one must be turned on for the slot in use.

# @ECLASS-VARIABLE: PYTHON_USE_WITH_OPT
# @DESCRIPTION:
# Set this to a name of a USE flag if you need to make either PYTHON_USE_WITH or
# PYTHON_USE_WITH_OR atoms conditional under a USE flag.

#add lib32? to PYTHON_USE_WITH to ensure that 32bit python is build if needed
if [[ "${PN}" != "python" ]]; then
	PYTHON_USE_WITH+="${PYTHON_USE_WITH:+ }lib32?"
fi

if !
has "${EAPI:-0}" 0 1 && [[ -n ${PYTHON_USE_WITH} || -n ${PYTHON_USE_WITH_OR} ]]; then
	# Build USE-dependency atoms (EAPI >= 2 syntax) from PYTHON_USE_WITH /
	# PYTHON_USE_WITH_OR and append them to DEPEND/RDEPEND.
	_PYTHON_USE_WITH_ATOMS_ARRAY=()
	if [[ -n "${PYTHON_USE_WITH}" ]]; then
		for _PYTHON_ATOM in "${_PYTHON_ATOMS[@]}"; do
			_PYTHON_USE_WITH_ATOMS_ARRAY+=("${_PYTHON_ATOM}[${PYTHON_USE_WITH// /,}]")
		done
	elif [[ -n "${PYTHON_USE_WITH_OR}" ]]; then
		for _USE_flag in ${PYTHON_USE_WITH_OR}; do
			for _PYTHON_ATOM in "${_PYTHON_ATOMS[@]}"; do
				_PYTHON_USE_WITH_ATOMS_ARRAY+=("${_PYTHON_ATOM}[${_USE_flag}]")
			done
		done
		unset _USE_flag
	fi
	if [[ "${#_PYTHON_USE_WITH_ATOMS_ARRAY[@]}" -gt 1 ]]; then
		_PYTHON_USE_WITH_ATOMS="|| ( ${_PYTHON_USE_WITH_ATOMS_ARRAY[@]} )"
	else
		_PYTHON_USE_WITH_ATOMS="${_PYTHON_USE_WITH_ATOMS_ARRAY[@]}"
	fi
	if [[ -n "${PYTHON_USE_WITH_OPT}" ]]; then
		_PYTHON_USE_WITH_ATOMS="${PYTHON_USE_WITH_OPT}? ( ${_PYTHON_USE_WITH_ATOMS} )"
	fi
	DEPEND+=" ${_PYTHON_USE_WITH_ATOMS}"
	RDEPEND+=" ${_PYTHON_USE_WITH_ATOMS}"
	unset _PYTHON_ATOM _PYTHON_USE_WITH_ATOMS _PYTHON_USE_WITH_ATOMS_ARRAY
fi

unset _PYTHON_ATOMS

# ================================================================================================
# =================================== MISCELLANEOUS FUNCTIONS ====================================
# ================================================================================================

# Internal: true when the current package is itself a Python implementation.
_python_implementation() {
	if [[ "${CATEGORY}/${PN}" == "dev-lang/python" ]]; then
		return 0
	elif [[ "${CATEGORY}/${PN}" == "dev-java/jython" ]]; then
		return 0
	else
		return 1
	fi
}

# Internal: true when the ebuild opted into multi-ABI installation
# (SUPPORT_PYTHON_ABIS). Must not be called in global scope.
_python_package_supporting_installation_for_multiple_python_abis() {
	if [[ "${EBUILD_PHASE}" == "depend" ]]; then
		die "${FUNCNAME}() cannot be used in global scope"
	fi

	if has "${EAPI:-0}" 0 1 2 3 4; then
		if [[ -n "${SUPPORT_PYTHON_ABIS}" ]]; then
			return 0
		else
			return 1
		fi
	else
		die "${FUNCNAME}(): Support for EAPI=\"${EAPI}\" not implemented"
	fi
}

# Internal: true when called (indirectly) from an ABI-specific context.
_python_abi-specific_local_scope() {
	[[ " ${FUNCNAME[@]:2} " =~ " 
"(_python_final_sanity_checks|python_execute_function|python_mod_optimize|python_mod_cleanup)" " ]]
}

# Internal: derive EROOT/ED from ROOT/D in EAPIs that lack them (0-2).
_python_initialize_prefix_variables() {
	if has "${EAPI:-0}" 0 1 2; then
		if [[ -n "${ROOT}" && -z "${EROOT}" ]]; then
			EROOT="${ROOT%/}${EPREFIX}/"
		fi
		if [[ -n "${D}" && -z "${ED}" ]]; then
			ED="${D%/}${EPREFIX}/"
		fi
	fi
}

unset PYTHON_SANITY_CHECKS_EXECUTED PYTHON_SKIP_SANITY_CHECKS

# Internal: verify the eselect-managed python/python-config wrappers once.
_python_initial_sanity_checks() {
	if [[ "$(declare -p PYTHON_SANITY_CHECKS_EXECUTED 2> /dev/null)" != "declare -- PYTHON_SANITY_CHECKS_EXECUTED="* || " ${FUNCNAME[@]:1} " =~ " "(python_set_active_version|python_pkg_setup)" " && -z "${PYTHON_SKIP_SANITY_CHECKS}" ]]; then
		# Ensure that /usr/bin/python and /usr/bin/python-config are valid.
		if [[ "$(readlink "${EPREFIX}/usr/bin/python")" != "python-wrapper" ]]; then
			eerror "'${EPREFIX}/usr/bin/python' is not valid symlink."
			eerror "Use \`eselect python set \${python_interpreter}\` to fix this problem."
			die "'${EPREFIX}/usr/bin/python' is not valid symlink"
		fi
		if [[ "$(<"${EPREFIX}/usr/bin/python-config")" != *"Gentoo python-config wrapper script"* ]]; then
			eerror "'${EPREFIX}/usr/bin/python-config' is not valid script"
			eerror "Use \`eselect python set \${python_interpreter}\` to fix this problem."
			die "'${EPREFIX}/usr/bin/python-config' is not valid script"
		fi
	fi
}

# Internal: verify every selected Python ABI is installed and that the
# interpreter honours EPYTHON; sets PYTHON_SANITY_CHECKS_EXECUTED.
_python_final_sanity_checks() {
	if ! _python_implementation && [[ "$(declare -p PYTHON_SANITY_CHECKS_EXECUTED 2> /dev/null)" != "declare -- PYTHON_SANITY_CHECKS_EXECUTED="* || " ${FUNCNAME[@]:1} " =~ " "(python_set_active_version|python_pkg_setup)" " && -z "${PYTHON_SKIP_SANITY_CHECKS}" ]]; then
		local PYTHON_ABI="${PYTHON_ABI}"
		for PYTHON_ABI in ${PYTHON_ABIS-${PYTHON_ABI}}; do
			# Ensure that appropriate version of Python is installed.
			if ! has_version "$(python_get_implementational_package)"; then
				die "$(python_get_implementational_package) is not installed"
			fi

			# Ensure that EPYTHON variable is respected.
			if [[ "$(EPYTHON="$(PYTHON)" python -c "${_PYTHON_ABI_EXTRACTION_COMMAND}")" != "${PYTHON_ABI}" ]]; then
				eerror "Path to 'python': '$(type -p python)'"
				eerror "ABI: '${ABI}'"
				eerror "DEFAULT_ABI: '${DEFAULT_ABI}'"
				eerror "EPYTHON: '$(PYTHON)'"
				eerror "PYTHON_ABI: '${PYTHON_ABI}'"
				eerror "Locally active version of Python: '$(EPYTHON="$(PYTHON)" python -c "${_PYTHON_ABI_EXTRACTION_COMMAND}")'"
				die "'python' does not respect EPYTHON variable"
			fi
		done
	fi
	PYTHON_SANITY_CHECKS_EXECUTED="1"
}

# @ECLASS-VARIABLE: PYTHON_COLORS
# @DESCRIPTION:
# User-configurable colored output.
PYTHON_COLORS="${PYTHON_COLORS:-0}"

# Internal: set ANSI color variables, or empty them when colors are
# disabled via PYTHON_COLORS=0 or NOCOLOR.
_python_set_color_variables() {
	if [[ "${PYTHON_COLORS}" != "0" && "${NOCOLOR:-false}" =~ ^(false|no)$ ]]; then
		_BOLD=$'\e[1m'
		_RED=$'\e[1;31m'
		_GREEN=$'\e[1;32m'
		_BLUE=$'\e[1;34m'
		_CYAN=$'\e[1;36m'
		_NORMAL=$'\e[0m'
	else
		_BOLD=
		_RED=
		_GREEN=
		_BLUE=
		_CYAN=
		_NORMAL=
	fi
}

unset PYTHON_PKG_SETUP_EXECUTED

# Internal: in EAPI >= 4, die if python_pkg_setup() was never run.
_python_check_python_pkg_setup_execution() {
	[[ " ${FUNCNAME[@]:1} " =~ " "(python_set_active_version|python_pkg_setup)" " ]] && return

	if ! has "${EAPI:-0}" 0 1 2 3 && [[ -z "${PYTHON_PKG_SETUP_EXECUTED}" ]]; then
		die "python_pkg_setup() not called"
	fi
}

# @FUNCTION: python_pkg_setup
# @DESCRIPTION:
# Perform sanity checks and initialize environment.
#
# This function is exported in EAPI 2 and 3 when PYTHON_USE_WITH or PYTHON_USE_WITH_OR variable
# is set and always in EAPI >=4. Calling of this function is mandatory in EAPI >=4.
#
# This function can be used only in pkg_setup() phase.
python_pkg_setup() {
	# Check if phase is pkg_setup().
	[[ "${EBUILD_PHASE}" != "setup" ]] && die "${FUNCNAME}() can be used only in pkg_setup() phase"

	if [[ "$#" -ne 0 ]]; then
		die "${FUNCNAME}() does not accept arguments"
	fi

	# Multi-ABI packages: compute PYTHON_ABIS and pick the final EPYTHON;
	# single-ABI packages: record the active interpreter's ABI.
	if _python_package_supporting_installation_for_multiple_python_abis; then
		_python_calculate_PYTHON_ABIS
		export EPYTHON="$(PYTHON -f)"
	else
		PYTHON_ABI="${PYTHON_ABI:-$(PYTHON --ABI)}"
	fi

	if ! has "${EAPI:-0}" 0 1 && [[ -n "${PYTHON_USE_WITH}" || -n "${PYTHON_USE_WITH_OR}" ]]; then
		# PYTHON_USE_WITH_OPT gates the whole check behind a USE flag
		# (leading '!' inverts the sense).
		if [[ "${PYTHON_USE_WITH_OPT}" ]]; then
			if [[ "${PYTHON_USE_WITH_OPT}" == !* ]]; then
				use ${PYTHON_USE_WITH_OPT#!} && return
			else
				use !${PYTHON_USE_WITH_OPT} && return
			fi
		fi

		python_pkg_setup_check_USE_flags() {
			local python_atom USE_flag
			python_atom="$(python_get_implementational_package)"

			for USE_flag in ${PYTHON_USE_WITH}; do
				if ! has_version "${python_atom}[${USE_flag}]"; then
					eerror "Please rebuild ${python_atom} with the following USE flags enabled: ${PYTHON_USE_WITH}"
					die "Please rebuild ${python_atom} with the following USE flags enabled: ${PYTHON_USE_WITH}"
				fi
			done

			for USE_flag in ${PYTHON_USE_WITH_OR}; do
				if has_version "${python_atom}[${USE_flag}]"; then
					return
				fi
			done

			if [[ ${PYTHON_USE_WITH_OR} ]]; then
				eerror "Please rebuild ${python_atom} with at least one of the following USE flags enabled: ${PYTHON_USE_WITH_OR}"
				die "Please rebuild ${python_atom} with at least one of the following USE flags enabled: ${PYTHON_USE_WITH_OR}"
			fi
		}

		if _python_package_supporting_installation_for_multiple_python_abis; then
			PYTHON_SKIP_SANITY_CHECKS="1" python_execute_function -q python_pkg_setup_check_USE_flags
		else
			python_pkg_setup_check_USE_flags
		fi

		unset -f python_pkg_setup_check_USE_flags
	fi

	PYTHON_PKG_SETUP_EXECUTED="1"
}

if !
has "${EAPI:-0}" 0 1 2 3 || has "${EAPI:-0}" 2 3 && [[ -n "${PYTHON_USE_WITH}" || -n "${PYTHON_USE_WITH_OR}" ]]; then
	EXPORT_FUNCTIONS pkg_setup
fi

# @FUNCTION: python_convert_shebangs
# @USAGE: [-q|--quiet] [-r|--recursive] [-x|--only-executables] [--] [files|directories]
# @DESCRIPTION:
# Convert shebangs in specified files. Directories can be specified only with --recursive option.
python_convert_shebangs() {
	_python_check_python_pkg_setup_execution

	local argument file files=() only_executables="0" python_version quiet="0" recursive="0"

	while (($#)); do
		case "$1" in
			-r|--recursive)
				recursive="1"
				;;
			-q|--quiet)
				quiet="1"
				;;
			-x|--only-executables)
				only_executables="1"
				;;
			--)
				shift
				break
				;;
			-*)
				die "${FUNCNAME}(): Unrecognized option '$1'"
				;;
			*)
				break
				;;
		esac
		shift
	done

	# First positional argument is the target Python version; the rest are
	# files/directories to rewrite.
	if [[ "$#" -eq 0 ]]; then
		die "${FUNCNAME}(): Missing Python version and files or directories"
	elif [[ "$#" -eq 1 ]]; then
		die "${FUNCNAME}(): Missing files or directories"
	fi

	python_version="$1"
	shift

	for argument in "$@"; do
		if [[ ! -e "${argument}" ]]; then
			die "${FUNCNAME}(): '${argument}' does not exist"
		elif [[ -f "${argument}" ]]; then
			files+=("${argument}")
		elif [[ -d "${argument}" ]]; then
			if [[ "${recursive}" == "1" ]]; then
				# -perm /111 limits the find to executables when requested.
				while read -d $'\0' -r file; do
					files+=("${file}")
				done < <(find "${argument}" $([[ "${only_executables}" == "1" ]] && echo -perm /111) -type f -print0)
			else
				die "${FUNCNAME}(): '${argument}' is not a regular file"
			fi
		else
			die "${FUNCNAME}(): '${argument}' is not a regular file or a directory"
		fi
	done

	for file in "${files[@]}"; do
		file="${file#./}"
		[[ "${only_executables}" == "1" && !
-x "${file}" ]] && continue

		# Only touch files whose first line is a python shebang, and skip
		# Gentoo-generated wrapper scripts (marker on line 2).
		if [[ "$(head -n1 "${file}")" =~ ^'#!'.*python ]]; then
			[[ "$(sed -ne "2p" "${file}")" =~ ^"# Gentoo '".*"' wrapper script generated by python_generate_wrapper_scripts()"$ ]] && continue

			if [[ "${quiet}" == "0" ]]; then
				einfo "Converting shebang in '${file}'"
			fi

			sed -e "1s/python\([[:digit:]]\+\(\.[[:digit:]]\+\)\?\)\?/python${python_version}/" -i "${file}" || die "Conversion of shebang in '${file}' failed"

			# Delete potential whitespace after "#!".
			sed -e '1s/\(^#!\)[[:space:]]*/\1/' -i "${file}" || die "sed '${file}' failed"
		fi
	done
}

# @FUNCTION: python_clean_installation_image
# @USAGE: [-q|--quiet]
# @DESCRIPTION:
# Delete needless files in installation image.
python_clean_installation_image() {
	_python_check_python_pkg_setup_execution
	_python_initialize_prefix_variables

	local file files=() quiet="0"

	# Check if phase is src_install().
	[[ "${EBUILD_PHASE}" != "install" ]] && die "${FUNCNAME}() can be used only in src_install() phase"

	while (($#)); do
		case "$1" in
			-q|--quiet)
				quiet="1"
				;;
			-*)
				die "${FUNCNAME}(): Unrecognized option '$1'"
				;;
			*)
				die "${FUNCNAME}(): Invalid usage"
				;;
		esac
		shift
	done

	# Collect stray byte-compiled modules (*.pyc/*.pyo/*$py.class) in ${ED}.
	while read -d $'\0' -r file; do
		files+=("${file}")
	done < <(find "${ED}" "(" -name "*.py[co]" -o -name "*\$py.class" ")" -type f -print0)

	if [[ "${#files[@]}" -gt 0 ]]; then
		if [[ "${quiet}" == "0" ]]; then
			ewarn "Deleting byte-compiled Python modules needlessly generated by build system:"
		fi
		for file in "${files[@]}"; do
			if [[ "${quiet}" == "0" ]]; then
				ewarn " ${file}"
			fi
			rm -f "${file}"

			# Delete empty __pycache__ directories.
			if [[ "${file%/*}" == *"/__pycache__" ]]; then
				rmdir "${file%/*}" 2> /dev/null
			fi
		done
	fi

	# Strip sources/headers/libtool files from site-packages directories.
	python_clean_sitedirs() {
		if [[ -d "${ED}$(python_get_sitedir)" ]]; then
			find "${ED}$(python_get_sitedir)" "(" -name "*.c" -o -name "*.h" -o -name "*.la" ")" -type f -print0 | xargs -0 rm -f
		fi
	}
	if _python_package_supporting_installation_for_multiple_python_abis; then
		python_execute_function -q python_clean_sitedirs
	else
		python_clean_sitedirs
	fi

	unset -f python_clean_sitedirs
}

# ================================================================================================
# =========== FUNCTIONS FOR PACKAGES SUPPORTING INSTALLATION FOR MULTIPLE PYTHON ABIS ============
# ================================================================================================

# @ECLASS-VARIABLE: SUPPORT_PYTHON_ABIS
# @DESCRIPTION:
# Set this in EAPI <= 4 to indicate that current package supports installation for
# multiple Python ABIs.

# @ECLASS-VARIABLE: PYTHON_EXPORT_PHASE_FUNCTIONS
# @DESCRIPTION:
# Set this to export phase functions for the following ebuild phases:
# src_prepare, src_configure, src_compile, src_test, src_install.
if ! has "${EAPI:-0}" 0 1; then
	python_src_prepare() {
		_python_check_python_pkg_setup_execution

		if ! _python_package_supporting_installation_for_multiple_python_abis; then
			die "${FUNCNAME}() cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
		fi

		if [[ "$#" -ne 0 ]]; then
			die "${FUNCNAME}() does not accept arguments"
		fi

		python_copy_sources
	}

	# Generate python_src_{configure,compile,test,install} which run the
	# default function once per enabled Python ABI.
	for python_default_function in src_configure src_compile src_test src_install; do
		eval "python_${python_default_function}() {
			_python_check_python_pkg_setup_execution

			if !
 _python_package_supporting_installation_for_multiple_python_abis; then
				die \"\${FUNCNAME}() cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs\"
			fi

			python_execute_function -d -s -- \"\$@\"
		}"
	done
	unset python_default_function

	if [[ -n "${PYTHON_EXPORT_PHASE_FUNCTIONS}" ]]; then
		EXPORT_FUNCTIONS src_prepare src_configure src_compile src_test src_install
	fi
fi

unset PYTHON_ABIS

# Internal: compute and export PYTHON_ABIS from USE_PYTHON (preferred) or
# from the installed/active Python 2 and 3 interpreters, honouring
# RESTRICT_PYTHON_ABIS patterns. Ends with _python_final_sanity_checks.
_python_calculate_PYTHON_ABIS() {
	if ! _python_package_supporting_installation_for_multiple_python_abis; then
		die "${FUNCNAME}() cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs"
	fi

	_python_initial_sanity_checks

	# USE_${ABI_TYPE^^} and RESTRICT_${ABI_TYPE^^}_ABIS variables hopefully will be included in EAPI >= 5.
	if [[ "$(declare -p PYTHON_ABIS 2> /dev/null)" != "declare -x PYTHON_ABIS="* ]] && has "${EAPI:-0}" 0 1 2 3 4; then
		local PYTHON_ABI restricted_ABI support_ABI supported_PYTHON_ABIS=
		PYTHON_ABI_SUPPORTED_VALUES="${_CPYTHON2_SUPPORTED_ABIS[@]} ${_CPYTHON3_SUPPORTED_ABIS[@]} ${_JYTHON_SUPPORTED_ABIS[@]}"

		if [[ "$(declare -p USE_PYTHON 2> /dev/null)" == "declare -x USE_PYTHON="* ]]; then
			local cpython_enabled="0"

			if [[ -z "${USE_PYTHON}" ]]; then
				die "USE_PYTHON variable is empty"
			fi

			for PYTHON_ABI in ${USE_PYTHON}; do
				if !
has "${PYTHON_ABI}" ${PYTHON_ABI_SUPPORTED_VALUES}; then
					die "USE_PYTHON variable contains invalid value '${PYTHON_ABI}'"
				fi

				if has "${PYTHON_ABI}" "${_CPYTHON2_SUPPORTED_ABIS[@]}" "${_CPYTHON3_SUPPORTED_ABIS[@]}"; then
					cpython_enabled="1"
				fi

				# Drop ABIs matching any RESTRICT_PYTHON_ABIS glob pattern.
				support_ABI="1"
				for restricted_ABI in ${RESTRICT_PYTHON_ABIS}; do
					if [[ "${PYTHON_ABI}" == ${restricted_ABI} ]]; then
						support_ABI="0"
						break
					fi
				done
				[[ "${support_ABI}" == "1" ]] && export PYTHON_ABIS+="${PYTHON_ABIS:+ }${PYTHON_ABI}"
			done

			if [[ -z "${PYTHON_ABIS//[${IFS}]/}" ]]; then
				die "USE_PYTHON variable does not enable any Python ABI supported by ${CATEGORY}/${PF}"
			fi

			if [[ "${cpython_enabled}" == "0" ]]; then
				die "USE_PYTHON variable does not enable any CPython ABI"
			fi
		else
			# No USE_PYTHON: derive ABIs from the active python2/python3.
			local python_version python2_version= python3_version= support_python_major_version

			if ! has_version "dev-lang/python"; then
				die "${FUNCNAME}(): 'dev-lang/python' is not installed"
			fi

			python_version="$("${EPREFIX}/usr/bin/python" -c 'from sys import version_info; print(".".join(str(x) for x in version_info[:2]))')"

			if has_version "=dev-lang/python-2*"; then
				if [[ "$(readlink "${EPREFIX}/usr/bin/python2")" != "python2."* ]]; then
					die "'${EPREFIX}/usr/bin/python2' is not valid symlink"
				fi

				python2_version="$("${EPREFIX}/usr/bin/python2" -c 'from sys import version_info; print(".".join(str(x) for x in version_info[:2]))')"

				for PYTHON_ABI in "${_CPYTHON2_SUPPORTED_ABIS[@]}"; do
					support_python_major_version="1"
					for restricted_ABI in ${RESTRICT_PYTHON_ABIS}; do
						if [[ "${PYTHON_ABI}" == ${restricted_ABI} ]]; then
							support_python_major_version="0"
						fi
					done
					[[ "${support_python_major_version}" == "1" ]] && break
				done
				if [[ "${support_python_major_version}" == "1" ]]; then
					for restricted_ABI in ${RESTRICT_PYTHON_ABIS}; do
						if [[ "${python2_version}" == ${restricted_ABI} ]]; then
							die "Active version of Python 2 is not supported by ${CATEGORY}/${PF}"
						fi
					done
				else
					python2_version=""
				fi
			fi

			if has_version "=dev-lang/python-3*"; then
				if [[ "$(readlink "${EPREFIX}/usr/bin/python3")" != "python3."* ]]; then
					die "'${EPREFIX}/usr/bin/python3' is not valid symlink"
				fi

				python3_version="$("${EPREFIX}/usr/bin/python3" -c 'from sys import version_info; print(".".join(str(x) for x in version_info[:2]))')"

				for PYTHON_ABI in "${_CPYTHON3_SUPPORTED_ABIS[@]}"; do
					support_python_major_version="1"
					for restricted_ABI in ${RESTRICT_PYTHON_ABIS}; do
						if [[ "${PYTHON_ABI}" == ${restricted_ABI} ]]; then
							support_python_major_version="0"
						fi
					done
					[[ "${support_python_major_version}" == "1" ]] && break
				done
				if [[ "${support_python_major_version}" == "1" ]]; then
					for restricted_ABI in ${RESTRICT_PYTHON_ABIS}; do
						if [[ "${python3_version}" == ${restricted_ABI} ]]; then
							die "Active version of Python 3 is not supported by ${CATEGORY}/${PF}"
						fi
					done
				else
					python3_version=""
				fi
			fi

			# Cross-check the generic 'python' wrapper against python2/python3.
			if [[ -n "${python2_version}" && "${python_version}" == "2."* && "${python_version}" != "${python2_version}" ]]; then
				eerror "Python wrapper is configured incorrectly or '${EPREFIX}/usr/bin/python2' symlink"
				eerror "is set incorrectly. Use \`eselect python\` to fix configuration."
				die "Incorrect configuration of Python"
			fi
			if [[ -n "${python3_version}" && "${python_version}" == "3."* && "${python_version}" != "${python3_version}" ]]; then
				eerror "Python wrapper is configured incorrectly or '${EPREFIX}/usr/bin/python3' symlink"
				eerror "is set incorrectly. Use \`eselect python\` to fix configuration."
				die "Incorrect configuration of Python"
			fi

			PYTHON_ABIS="${python2_version} ${python3_version}"
			PYTHON_ABIS="${PYTHON_ABIS# }"
			export PYTHON_ABIS="${PYTHON_ABIS% }"
		fi
	fi

	_python_final_sanity_checks
}

# Internal: save CPPFLAGS/CFLAGS/CXXFLAGS/LDFLAGS and apply per-ABI
# adjustments from PYTHON_<var> / PYTHON_USER_<var> arrays. Each array
# element has the form "<ABI-pattern> <+|-> <flags>".
_python_prepare_flags() {
	local array=() deleted_flag element flags new_value old_flag old_value operator pattern prefix variable

	for variable in CPPFLAGS CFLAGS CXXFLAGS LDFLAGS; do
		eval "_PYTHON_SAVED_${variable}=\"\${!variable}\""
		for prefix in PYTHON_USER_ PYTHON_; do
			if [[ "$(declare -p ${prefix}${variable} 2> /dev/null)" == "declare -a ${prefix}${variable}="* ]]; then
				eval "array=(\"\${${prefix}${variable}[@]}\")"
				for element in "${array[@]}"; do
					if [[ "${element}" =~ ^([[:alnum:]]|\.|-|\*|\[|\])+\ (\+|-)\ .+ ]]; then
						pattern="${element%% *}"
						element="${element#* }"
						operator="${element%% *}"
						flags="${element#* }"
						if [[ "${PYTHON_ABI}" == ${pattern} ]]; then
							if [[ "${operator}" == "+" ]]; then
								eval "export ${variable}+=\"\${variable:+ }${flags}\""
							elif [[ "${operator}" == "-" ]]; then
								# Remove each listed flag from the variable.
								flags="${flags// /$'\n'}"
								old_value="${!variable// /$'\n'}"
								new_value=""
								while read old_flag; do
									while read deleted_flag; do
										if [[ "${old_flag}" == ${deleted_flag} ]]; then
											continue 2
										fi
									done <<< "${flags}"
									new_value+="${new_value:+ }${old_flag}"
								done <<< "${old_value}"
								eval "export ${variable}=\"\${new_value}\""
							fi
						fi
					else
						die "Element '${element}' of ${prefix}${variable} array has invalid syntax"
					fi
				done
			elif [[ -n "$(declare -p ${prefix}${variable} 2> /dev/null)" ]]; then
				die "${prefix}${variable} should be indexed array"
			fi
		done
	done
}

# Internal: restore the flag variables saved by _python_prepare_flags().
_python_restore_flags() {
	local variable

	for variable in CPPFLAGS CFLAGS CXXFLAGS LDFLAGS; do
		eval "${variable}=\"\${_PYTHON_SAVED_${variable}}\""
		unset _PYTHON_SAVED_${variable}
	done
}

# @FUNCTION: python_execute_function
# @USAGE: [--action-message message] [-d|--default-function] [--failure-message message] 
[-f|--final-ABI] [--nonfatal] [-q|--quiet] [-s|--separate-build-dirs] [--source-dir source_directory] [--] [arguments] +# @DESCRIPTION: +# Execute specified function for each value of PYTHON_ABIS, optionally passing additional +# arguments. The specified function can use PYTHON_ABI and BUILDDIR variables. +python_execute_function() { + _python_check_python_pkg_setup_execution + + if ! _python_package_supporting_installation_for_multiple_python_abis; then + die "${FUNCNAME}() cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs" + fi + + _python_set_color_variables + + local action action_message action_message_template= default_function="0" failure_message failure_message_template= final_ABI="0" function iterated_PYTHON_ABIS nonfatal="0" previous_directory previous_directory_stack previous_directory_stack_length PYTHON_ABI quiet="0" return_code separate_build_dirs="0" source_dir= + + while (($#)); do + case "$1" in + --action-message) + action_message_template="$2" + shift + ;; + -d|--default-function) + default_function="1" + ;; + --failure-message) + failure_message_template="$2" + shift + ;; + -f|--final-ABI) + final_ABI="1" + ;; + --nonfatal) + nonfatal="1" + ;; + -q|--quiet) + quiet="1" + ;; + -s|--separate-build-dirs) + separate_build_dirs="1" + ;; + --source-dir) + source_dir="$2" + shift + ;; + --) + shift + break + ;; + -*) + die "${FUNCNAME}(): Unrecognized option '$1'" + ;; + *) + break + ;; + esac + shift + done + + if [[ -n "${source_dir}" && "${separate_build_dirs}" == 0 ]]; then + die "${FUNCNAME}(): '--source-dir' option can be specified only with '--separate-build-dirs' option" + fi + + if [[ "${default_function}" == "0" ]]; then + if [[ "$#" -eq 0 ]]; then + die "${FUNCNAME}(): Missing function name" + fi + function="$1" + shift + + if [[ -z "$(type -t "${function}")" ]]; then + die "${FUNCNAME}(): '${function}' function is not defined" + fi + else + if has "${EAPI:-0}" 0 1; then + die "${FUNCNAME}(): 
'--default-function' option cannot be used in this EAPI" + fi + + if [[ "${EBUILD_PHASE}" == "configure" ]]; then + if has "${EAPI}" 2 3; then + python_default_function() { + econf "$@" + } + else + python_default_function() { + nonfatal econf "$@" + } + fi + elif [[ "${EBUILD_PHASE}" == "compile" ]]; then + python_default_function() { + emake "$@" + } + elif [[ "${EBUILD_PHASE}" == "test" ]]; then + python_default_function() { + if emake -j1 -n check &> /dev/null; then + emake -j1 check "$@" + elif emake -j1 -n test &> /dev/null; then + emake -j1 test "$@" + fi + } + elif [[ "${EBUILD_PHASE}" == "install" ]]; then + python_default_function() { + emake DESTDIR="${D}" install "$@" + } + else + die "${FUNCNAME}(): '--default-function' option cannot be used in this ebuild phase" + fi + function="python_default_function" + fi + + # Ensure that python_execute_function() cannot be directly or indirectly called by python_execute_function(). + if _python_abi-specific_local_scope; then + die "${FUNCNAME}(): Invalid call stack" + fi + + if [[ "${quiet}" == "0" ]]; then + [[ "${EBUILD_PHASE}" == "setup" ]] && action="Setting up" + [[ "${EBUILD_PHASE}" == "unpack" ]] && action="Unpacking" + [[ "${EBUILD_PHASE}" == "prepare" ]] && action="Preparation" + [[ "${EBUILD_PHASE}" == "configure" ]] && action="Configuration" + [[ "${EBUILD_PHASE}" == "compile" ]] && action="Building" + [[ "${EBUILD_PHASE}" == "test" ]] && action="Testing" + [[ "${EBUILD_PHASE}" == "install" ]] && action="Installation" + [[ "${EBUILD_PHASE}" == "preinst" ]] && action="Preinstallation" + [[ "${EBUILD_PHASE}" == "postinst" ]] && action="Postinstallation" + [[ "${EBUILD_PHASE}" == "prerm" ]] && action="Preuninstallation" + [[ "${EBUILD_PHASE}" == "postrm" ]] && action="Postuninstallation" + fi + + _python_calculate_PYTHON_ABIS + if [[ "${final_ABI}" == "1" ]]; then + iterated_PYTHON_ABIS="$(PYTHON -f --ABI)" + else + iterated_PYTHON_ABIS="${PYTHON_ABIS}" + fi + for PYTHON_ABI in ${iterated_PYTHON_ABIS}; do 
+ _python_prepare_flags + + if [[ "${quiet}" == "0" ]]; then + if [[ -n "${action_message_template}" ]]; then + action_message="$(eval echo -n "${action_message_template}")" + else + action_message="${action} of ${CATEGORY}/${PF} with $(python_get_implementation) $(python_get_version)..." + fi + echo " ${_GREEN}*${_NORMAL} ${_BLUE}${action_message}${_NORMAL}" + fi + + if [[ "${separate_build_dirs}" == "1" ]]; then + if [[ -n "${source_dir}" ]]; then + export BUILDDIR="${S}/${source_dir}-${PYTHON_ABI}" + else + export BUILDDIR="${S}-${PYTHON_ABI}" + fi + pushd "${BUILDDIR}" > /dev/null || die "pushd failed" + else + export BUILDDIR="${S}" + fi + + previous_directory="$(pwd)" + previous_directory_stack="$(dirs -p)" + previous_directory_stack_length="$(dirs -p | wc -l)" + + if ! has "${EAPI}" 0 1 2 3 && has "${PYTHON_ABI}" ${FAILURE_TOLERANT_PYTHON_ABIS}; then + EPYTHON="$(PYTHON)" nonfatal "${function}" "$@" + else + EPYTHON="$(PYTHON)" "${function}" "$@" + fi + + return_code="$?" + + _python_restore_flags + + if [[ "${return_code}" -ne 0 ]]; then + if [[ -n "${failure_message_template}" ]]; then + failure_message="$(eval echo -n "${failure_message_template}")" + else + failure_message="${action} failed with $(python_get_implementation) $(python_get_version) in ${function}() function" + fi + + if [[ "${nonfatal}" == "1" ]]; then + if [[ "${quiet}" == "0" ]]; then + ewarn "${failure_message}" + fi + elif [[ "${final_ABI}" == "0" ]] && has "${PYTHON_ABI}" ${FAILURE_TOLERANT_PYTHON_ABIS}; then + if [[ "${EBUILD_PHASE}" != "test" ]] || ! 
has test-fail-continue ${FEATURES}; then + local enabled_PYTHON_ABIS= other_PYTHON_ABI + for other_PYTHON_ABI in ${PYTHON_ABIS}; do + [[ "${other_PYTHON_ABI}" != "${PYTHON_ABI}" ]] && enabled_PYTHON_ABIS+="${enabled_PYTHON_ABIS:+ }${other_PYTHON_ABI}" + done + export PYTHON_ABIS="${enabled_PYTHON_ABIS}" + fi + if [[ "${quiet}" == "0" ]]; then + ewarn "${failure_message}" + fi + if [[ -z "${PYTHON_ABIS}" ]]; then + die "${function}() function failed with all enabled Python ABIs" + fi + else + die "${failure_message}" + fi + fi + + # Ensure that directory stack has not been decreased. + if [[ "$(dirs -p | wc -l)" -lt "${previous_directory_stack_length}" ]]; then + die "Directory stack decreased illegally" + fi + + # Avoid side effects of earlier returning from the specified function. + while [[ "$(dirs -p | wc -l)" -gt "${previous_directory_stack_length}" ]]; do + popd > /dev/null || die "popd failed" + done + + # Ensure that the bottom part of directory stack has not been changed. Restore + # previous directory (from before running of the specified function) before + # comparison of directory stacks to avoid mismatch of directory stacks after + # potential using of 'cd' to change current directory. Restoration of previous + # directory allows to safely use 'cd' to change current directory in the + # specified function without changing it back to original directory. + cd "${previous_directory}" + if [[ "$(dirs -p)" != "${previous_directory_stack}" ]]; then + die "Directory stack changed illegally" + fi + + if [[ "${separate_build_dirs}" == "1" ]]; then + popd > /dev/null || die "popd failed" + fi + unset BUILDDIR + done + + if [[ "${default_function}" == "1" ]]; then + unset -f python_default_function + fi +} + +# @FUNCTION: python_copy_sources +# @USAGE: [directory] +# @DESCRIPTION: +# Copy unpacked sources of current package to separate build directory for each Python ABI. +python_copy_sources() { + _python_check_python_pkg_setup_execution + + if ! 
_python_package_supporting_installation_for_multiple_python_abis; then + die "${FUNCNAME}() cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs" + fi + + local dir dirs=() PYTHON_ABI + + if [[ "$#" -eq 0 ]]; then + if [[ "${WORKDIR}" == "${S}" ]]; then + die "${FUNCNAME}() cannot be used with current value of S variable" + fi + dirs=("${S%/}") + else + dirs=("$@") + fi + + _python_calculate_PYTHON_ABIS + for PYTHON_ABI in ${PYTHON_ABIS}; do + for dir in "${dirs[@]}"; do + cp -pr "${dir}" "${dir}-${PYTHON_ABI}" > /dev/null || die "Copying of sources failed" + done + done +} + +# @FUNCTION: python_generate_wrapper_scripts +# @USAGE: [-E|--respect-EPYTHON] [-f|--force] [-q|--quiet] [--] [files] +# @DESCRIPTION: +# Generate wrapper scripts. Existing files are overwritten only with --force option. +# If --respect-EPYTHON option is specified, then generated wrapper scripts will +# respect EPYTHON variable at run time. +python_generate_wrapper_scripts() { + _python_check_python_pkg_setup_execution + + if ! 
_python_package_supporting_installation_for_multiple_python_abis; then + die "${FUNCNAME}() cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs" + fi + + _python_initialize_prefix_variables + + local eselect_python_option file force="0" quiet="0" PYTHON_ABI python2_enabled="0" python3_enabled="0" respect_EPYTHON="0" + + while (($#)); do + case "$1" in + -E|--respect-EPYTHON) + respect_EPYTHON="1" + ;; + -f|--force) + force="1" + ;; + -q|--quiet) + quiet="1" + ;; + --) + shift + break + ;; + -*) + die "${FUNCNAME}(): Unrecognized option '$1'" + ;; + *) + break + ;; + esac + shift + done + + if [[ "$#" -eq 0 ]]; then + die "${FUNCNAME}(): Missing arguments" + fi + + _python_calculate_PYTHON_ABIS + for PYTHON_ABI in "${_CPYTHON2_SUPPORTED_ABIS[@]}"; do + if has "${PYTHON_ABI}" ${PYTHON_ABIS}; then + python2_enabled="1" + fi + done + for PYTHON_ABI in "${_CPYTHON3_SUPPORTED_ABIS[@]}"; do + if has "${PYTHON_ABI}" ${PYTHON_ABIS}; then + python3_enabled="1" + fi + done + + if [[ "${python2_enabled}" == "1" && "${python3_enabled}" == "1" ]]; then + eselect_python_option= + elif [[ "${python2_enabled}" == "1" && "${python3_enabled}" == "0" ]]; then + eselect_python_option="--python2" + elif [[ "${python2_enabled}" == "0" && "${python3_enabled}" == "1" ]]; then + eselect_python_option="--python3" + else + die "${FUNCNAME}(): Unsupported environment" + fi + + for file in "$@"; do + if [[ -f "${file}" && "${force}" == "0" ]]; then + die "${FUNCNAME}(): '$1' already exists" + fi + + if [[ "${quiet}" == "0" ]]; then + einfo "Generating '${file#${ED%/}}' wrapper script" + fi + + cat << EOF > "${file}" +#!/usr/bin/env python +# Gentoo '${file##*/}' wrapper script generated by python_generate_wrapper_scripts() + +import os +import re +import subprocess +import sys + +EPYTHON_re = re.compile(r"^python(\d+\.\d+)$") +python_shebang_re = re.compile(r"^#! 
*(${EPREFIX}/usr/bin/python|(${EPREFIX})?/usr/bin/env +(${EPREFIX}/usr/bin/)?python)") +python_verification_output_re = re.compile("^GENTOO_PYTHON_TARGET_SCRIPT_PATH supported\n$") + +EOF + if [[ "$?" != "0" ]]; then + die "${FUNCNAME}(): Generation of '$1' failed" + fi + if [[ "${respect_EPYTHON}" == "1" ]]; then + cat << EOF >> "${file}" +EPYTHON = os.environ.get("EPYTHON") +if EPYTHON: + EPYTHON_matched = EPYTHON_re.match(EPYTHON) + if EPYTHON_matched: + PYTHON_ABI = EPYTHON_matched.group(1) + else: + sys.stderr.write("EPYTHON variable has unrecognized value '%s'\n" % EPYTHON) + sys.exit(1) +else: + try: + eselect_process = subprocess.Popen(["${EPREFIX}/usr/bin/eselect", "python", "show"${eselect_python_option:+, $(echo "\"")}${eselect_python_option}${eselect_python_option:+$(echo "\"")}], stdout=subprocess.PIPE) + if eselect_process.wait() != 0: + raise ValueError + except (OSError, ValueError): + sys.stderr.write("Execution of 'eselect python show${eselect_python_option:+ }${eselect_python_option}' failed\n") + sys.exit(1) + + EPYTHON = eselect_process.stdout.read() + if not isinstance(EPYTHON, str): + # Python 3 + EPYTHON = EPYTHON.decode() + EPYTHON = EPYTHON.rstrip("\n") + + EPYTHON_matched = EPYTHON_re.match(EPYTHON) + if EPYTHON_matched: + PYTHON_ABI = EPYTHON_matched.group(1) + else: + sys.stderr.write("'eselect python show${eselect_python_option:+ }${eselect_python_option}' printed unrecognized value '%s'\n" % EPYTHON) + sys.exit(1) +EOF + if [[ "$?" 
!= "0" ]]; then + die "${FUNCNAME}(): Generation of '$1' failed" + fi + else + cat << EOF >> "${file}" +try: + eselect_process = subprocess.Popen(["${EPREFIX}/usr/bin/eselect", "python", "show"${eselect_python_option:+, $(echo "\"")}${eselect_python_option}${eselect_python_option:+$(echo "\"")}], stdout=subprocess.PIPE) + if eselect_process.wait() != 0: + raise ValueError +except (OSError, ValueError): + sys.stderr.write("Execution of 'eselect python show${eselect_python_option:+ }${eselect_python_option}' failed\n") + sys.exit(1) + +EPYTHON = eselect_process.stdout.read() +if not isinstance(EPYTHON, str): + # Python 3 + EPYTHON = EPYTHON.decode() +EPYTHON = EPYTHON.rstrip("\n") + +EPYTHON_matched = EPYTHON_re.match(EPYTHON) +if EPYTHON_matched: + PYTHON_ABI = EPYTHON_matched.group(1) +else: + sys.stderr.write("'eselect python show${eselect_python_option:+ }${eselect_python_option}' printed unrecognized value '%s'\n" % EPYTHON) + sys.exit(1) +EOF + if [[ "$?" != "0" ]]; then + die "${FUNCNAME}(): Generation of '$1' failed" + fi + fi + cat << EOF >> "${file}" + +wrapper_script_path = os.path.realpath(sys.argv[0]) +target_executable_path = "%s-%s" % (wrapper_script_path, PYTHON_ABI) +os.environ["GENTOO_PYTHON_PROCESS_NAME"] = os.path.basename(sys.argv[0]) +os.environ["GENTOO_PYTHON_WRAPPER_SCRIPT_PATH"] = sys.argv[0] +os.environ["GENTOO_PYTHON_TARGET_SCRIPT_PATH"] = target_executable_path +if not os.path.exists(target_executable_path): + sys.stderr.write("'%s' does not exist\n" % target_executable_path) + sys.exit(1) + +target_executable = open(target_executable_path, "rb") +target_executable_first_line = target_executable.readline() +if not isinstance(target_executable_first_line, str): + # Python 3 + target_executable_first_line = target_executable_first_line.decode("utf_8", "replace") + +python_shebang_matched = python_shebang_re.match(target_executable_first_line) +target_executable.close() + +if python_shebang_matched: + try: + python_interpreter_path = 
"${EPREFIX}/usr/bin/%s" % EPYTHON + os.environ["GENTOO_PYTHON_TARGET_SCRIPT_PATH_VERIFICATION"] = "1" + python_verification_process = subprocess.Popen([python_interpreter_path, "-c", "pass"], stdout=subprocess.PIPE) + del os.environ["GENTOO_PYTHON_TARGET_SCRIPT_PATH_VERIFICATION"] + if python_verification_process.wait() != 0: + raise ValueError + + python_verification_output = python_verification_process.stdout.read() + if not isinstance(python_verification_output, str): + # Python 3 + python_verification_output = python_verification_output.decode() + + if not python_verification_output_re.match(python_verification_output): + raise ValueError + + os.execv(python_interpreter_path, [python_interpreter_path] + sys.argv) + except: + pass + if "GENTOO_PYTHON_TARGET_SCRIPT_PATH_VERIFICATION" in os.environ: + del os.environ["GENTOO_PYTHON_TARGET_SCRIPT_PATH_VERIFICATION"] + +os.execv(target_executable_path, sys.argv) +EOF + if [[ "$?" != "0" ]]; then + die "${FUNCNAME}(): Generation of '$1' failed" + fi + fperms +x "${file#${ED%/}}" || die "fperms '${file}' failed" + done +} + +# ================================================================================================ +# ========= FUNCTIONS FOR PACKAGES NOT SUPPORTING INSTALLATION FOR MULTIPLE PYTHON ABIS ========== +# ================================================================================================ + +unset EPYTHON PYTHON_ABI + +# @FUNCTION: python_set_active_version +# @USAGE: +# @DESCRIPTION: +# Set specified version of CPython as active version of Python. +# +# This function can be used only in pkg_setup() phase. +python_set_active_version() { + # Check if phase is pkg_setup(). 
+ [[ "${EBUILD_PHASE}" != "setup" ]] && die "${FUNCNAME}() can be used only in pkg_setup() phase" + + if _python_package_supporting_installation_for_multiple_python_abis; then + die "${FUNCNAME}() cannot be used in ebuilds of packages supporting installation for multiple Python ABIs" + fi + + if [[ "$#" -ne 1 ]]; then + die "${FUNCNAME}() requires 1 argument" + fi + + _python_initial_sanity_checks + + if [[ -z "${PYTHON_ABI}" ]]; then + if [[ "$1" =~ ^[[:digit:]]+\.[[:digit:]]+$ ]]; then + if ! _python_implementation && ! has_version "dev-lang/python:$1"; then + die "${FUNCNAME}(): 'dev-lang/python:$1' is not installed" + fi + export EPYTHON="$(PYTHON "$1")" + elif [[ "$1" == "2" ]]; then + if ! _python_implementation && ! has_version "=dev-lang/python-2*"; then + die "${FUNCNAME}(): '=dev-lang/python-2*' is not installed" + fi + export EPYTHON="$(PYTHON -2)" + elif [[ "$1" == "3" ]]; then + if ! _python_implementation && ! has_version "=dev-lang/python-3*"; then + die "${FUNCNAME}(): '=dev-lang/python-3*' is not installed" + fi + export EPYTHON="$(PYTHON -3)" + else + die "${FUNCNAME}(): Unrecognized argument '$1'" + fi + + # PYTHON_ABI variable is intended to be used only in ebuilds/eclasses, + # so it does not need to be exported to subprocesses. + PYTHON_ABI="${EPYTHON#python}" + PYTHON_ABI="${PYTHON_ABI%%-*}" + fi + + _python_final_sanity_checks + + # python-updater checks PYTHON_REQUESTED_ACTIVE_VERSION variable. + PYTHON_REQUESTED_ACTIVE_VERSION="$1" +} + +# @FUNCTION: python_need_rebuild +# @DESCRIPTION: Mark current package for rebuilding by python-updater after +# switching of active version of Python. 
+python_need_rebuild() {
+	_python_check_python_pkg_setup_execution
+
+	if _python_package_supporting_installation_for_multiple_python_abis; then
+		die "${FUNCNAME}() cannot be used in ebuilds of packages supporting installation for multiple Python ABIs"
+	fi
+
+	if [[ "$#" -ne 0 ]]; then
+		die "${FUNCNAME}() does not accept arguments"
+	fi
+
+	# PYTHON_NEED_REBUILD records the ABI this package was built against;
+	# python-updater inspects it to decide whether a rebuild is required.
+	export PYTHON_NEED_REBUILD="$(PYTHON --ABI)"
+}
+
+# ================================================================================================
+# ======================================= GETTER FUNCTIONS =======================================
+# ================================================================================================
+
+# Python snippet executed by a given interpreter to print its own ABI string:
+# "major.minor" for CPython, with a "-jython" suffix appended on Jython.
+_PYTHON_ABI_EXTRACTION_COMMAND='import platform
+import sys
+sys.stdout.write(".".join(str(x) for x in sys.version_info[:2]))
+if platform.system()[:4] == "Java":
+	sys.stdout.write("-jython")'
+
+# Map a Python ABI string to its implementation name:
+# "X.Y" -> "CPython", "X.Y-jython" -> "Jython"; dies on anything else.
+_python_get_implementation() {
+	if [[ "$#" -ne 1 ]]; then
+		die "${FUNCNAME}() requires 1 argument"
+	fi
+
+	if [[ "$1" =~ ^[[:digit:]]+\.[[:digit:]]+$ ]]; then
+		echo "CPython"
+	elif [[ "$1" =~ ^[[:digit:]]+\.[[:digit:]]+-jython$ ]]; then
+		echo "Jython"
+	else
+		die "${FUNCNAME}(): Unrecognized Python ABI '$1'"
+	fi
+}
+
+# @FUNCTION: PYTHON
+# @USAGE: [-2] [-3] [--ABI] [-a|--absolute-path] [-f|--final-ABI] [--]
+# @DESCRIPTION:
+# Print filename of Python interpreter for specified Python ABI. If Python_ABI argument
+# is omitted, then PYTHON_ABI environment variable must be set and is used.
+# If -2 option is specified, then active version of Python 2 is used.
+# If -3 option is specified, then active version of Python 3 is used.
+# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used.
+# -2, -3 and --final-ABI options and Python_ABI argument cannot be specified simultaneously.
+# If --ABI option is specified, then only specified Python ABI is printed instead of
+# filename of Python interpreter.
+# If --absolute-path option is specified, then absolute path to Python interpreter is printed. +# --ABI and --absolute-path options cannot be specified simultaneously. +PYTHON() { + _python_check_python_pkg_setup_execution + + local ABI_output="0" absolute_path_output="0" final_ABI="0" PYTHON_ABI="${PYTHON_ABI}" python_interpreter python2="0" python3="0" + + while (($#)); do + case "$1" in + -2) + python2="1" + ;; + -3) + python3="1" + ;; + --ABI) + ABI_output="1" + ;; + -a|--absolute-path) + absolute_path_output="1" + ;; + -f|--final-ABI) + final_ABI="1" + ;; + --) + shift + break + ;; + -*) + die "${FUNCNAME}(): Unrecognized option '$1'" + ;; + *) + break + ;; + esac + shift + done + + if [[ "${ABI_output}" == "1" && "${absolute_path_output}" == "1" ]]; then + die "${FUNCNAME}(): '--ABI' and '--absolute-path' options cannot be specified simultaneously" + fi + + if [[ "$((${python2} + ${python3} + ${final_ABI}))" -gt 1 ]]; then + die "${FUNCNAME}(): '-2', '-3' or '--final-ABI' options cannot be specified simultaneously" + fi + + if [[ "$#" -eq 0 ]]; then + if [[ "${final_ABI}" == "1" ]]; then + if ! 
_python_package_supporting_installation_for_multiple_python_abis; then + die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs" + fi + _python_calculate_PYTHON_ABIS + PYTHON_ABI="${PYTHON_ABIS##* }" + elif [[ "${python2}" == "1" ]]; then + PYTHON_ABI="$(eselect python show --python2 --ABI)" + if [[ -z "${PYTHON_ABI}" ]]; then + die "${FUNCNAME}(): Active version of Python 2 not set" + elif [[ "${PYTHON_ABI}" != "2."* ]]; then + die "${FUNCNAME}(): Internal error in \`eselect python show --python2\`" + fi + elif [[ "${python3}" == "1" ]]; then + PYTHON_ABI="$(eselect python show --python3 --ABI)" + if [[ -z "${PYTHON_ABI}" ]]; then + die "${FUNCNAME}(): Active version of Python 3 not set" + elif [[ "${PYTHON_ABI}" != "3."* ]]; then + die "${FUNCNAME}(): Internal error in \`eselect python show --python3\`" + fi + elif _python_package_supporting_installation_for_multiple_python_abis; then + if ! _python_abi-specific_local_scope; then + die "${FUNCNAME}() should be used in ABI-specific local scope" + fi + else + PYTHON_ABI="$("${EPREFIX}/usr/bin/python" -c "${_PYTHON_ABI_EXTRACTION_COMMAND}")" + if [[ -z "${PYTHON_ABI}" ]]; then + die "${FUNCNAME}(): Failure of extraction of locally active version of Python" + fi + fi + elif [[ "$#" -eq 1 ]]; then + if [[ "${final_ABI}" == "1" ]]; then + die "${FUNCNAME}(): '--final-ABI' option and Python ABI cannot be specified simultaneously" + fi + if [[ "${python2}" == "1" ]]; then + die "${FUNCNAME}(): '-2' option and Python ABI cannot be specified simultaneously" + fi + if [[ "${python3}" == "1" ]]; then + die "${FUNCNAME}(): '-3' option and Python ABI cannot be specified simultaneously" + fi + PYTHON_ABI="$1" + else + die "${FUNCNAME}(): Invalid usage" + fi + + if [[ "${ABI_output}" == "1" ]]; then + echo -n "${PYTHON_ABI}" + return + else + if [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "CPython" ]]; then + 
python_interpreter="python${PYTHON_ABI}" + elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "Jython" ]]; then + python_interpreter="jython-${PYTHON_ABI%-jython}" + fi + + if [[ "${absolute_path_output}" == "1" ]]; then + echo -n "${EPREFIX}/usr/bin/${python_interpreter}" + else + echo -n "${python_interpreter}" + fi + fi + + if [[ -n "${ABI}" && "${ABI}" != "${DEFAULT_ABI}" && "${DEFAULT_ABI}" != "default" ]]; then + echo -n "-${ABI}" + fi +} + +# @FUNCTION: python_get_implementation +# @USAGE: [-f|--final-ABI] +# @DESCRIPTION: +# Print name of Python implementation. +# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used. +python_get_implementation() { + _python_check_python_pkg_setup_execution + + local final_ABI="0" PYTHON_ABI="${PYTHON_ABI}" + + while (($#)); do + case "$1" in + -f|--final-ABI) + final_ABI="1" + ;; + -*) + die "${FUNCNAME}(): Unrecognized option '$1'" + ;; + *) + die "${FUNCNAME}(): Invalid usage" + ;; + esac + shift + done + + if [[ "${final_ABI}" == "1" ]]; then + if ! _python_package_supporting_installation_for_multiple_python_abis; then + die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs" + fi + PYTHON_ABI="$(PYTHON -f --ABI)" + else + if _python_package_supporting_installation_for_multiple_python_abis; then + if ! _python_abi-specific_local_scope; then + die "${FUNCNAME}() should be used in ABI-specific local scope" + fi + else + PYTHON_ABI="${PYTHON_ABI:-$(PYTHON --ABI)}" + fi + fi + + echo "$(_python_get_implementation "${PYTHON_ABI}")" +} + +# @FUNCTION: python_get_implementational_package +# @USAGE: [-f|--final-ABI] +# @DESCRIPTION: +# Print category, name and slot of package providing Python implementation. +# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used. 
+python_get_implementational_package() { + _python_check_python_pkg_setup_execution + + local final_ABI="0" PYTHON_ABI="${PYTHON_ABI}" + + while (($#)); do + case "$1" in + -f|--final-ABI) + final_ABI="1" + ;; + -*) + die "${FUNCNAME}(): Unrecognized option '$1'" + ;; + *) + die "${FUNCNAME}(): Invalid usage" + ;; + esac + shift + done + + if [[ "${final_ABI}" == "1" ]]; then + if ! _python_package_supporting_installation_for_multiple_python_abis; then + die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs" + fi + PYTHON_ABI="$(PYTHON -f --ABI)" + else + if _python_package_supporting_installation_for_multiple_python_abis; then + if ! _python_abi-specific_local_scope; then + die "${FUNCNAME}() should be used in ABI-specific local scope" + fi + else + PYTHON_ABI="${PYTHON_ABI:-$(PYTHON --ABI)}" + fi + fi + + if [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "CPython" ]]; then + echo "dev-lang/python:${PYTHON_ABI}" + elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "Jython" ]]; then + echo "dev-java/jython:${PYTHON_ABI%-jython}" + fi +} + +# @FUNCTION: python_get_includedir +# @USAGE: [-b|--base-path] [-f|--final-ABI] +# @DESCRIPTION: +# Print path to Python include directory. +# If --base-path option is specified, then path not prefixed with "/" is printed. +# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used. +python_get_includedir() { + _python_check_python_pkg_setup_execution + + local base_path="0" final_ABI="0" prefix PYTHON_ABI="${PYTHON_ABI}" + + while (($#)); do + case "$1" in + -b|--base-path) + base_path="1" + ;; + -f|--final-ABI) + final_ABI="1" + ;; + -*) + die "${FUNCNAME}(): Unrecognized option '$1'" + ;; + *) + die "${FUNCNAME}(): Invalid usage" + ;; + esac + shift + done + + if [[ "${base_path}" == "0" ]]; then + prefix="/" + fi + + if [[ "${final_ABI}" == "1" ]]; then + if ! 
_python_package_supporting_installation_for_multiple_python_abis; then + die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs" + fi + PYTHON_ABI="$(PYTHON -f --ABI)" + else + if _python_package_supporting_installation_for_multiple_python_abis; then + if ! _python_abi-specific_local_scope; then + die "${FUNCNAME}() should be used in ABI-specific local scope" + fi + else + PYTHON_ABI="${PYTHON_ABI:-$(PYTHON --ABI)}" + fi + fi + + if [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "CPython" ]]; then + echo "${prefix}usr/include/python${PYTHON_ABI}" + elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "Jython" ]]; then + echo "${prefix}usr/share/jython-${PYTHON_ABI%-jython}/Include" + fi +} + +# @FUNCTION: python_get_libdir +# @USAGE: [-b|--base-path] [-f|--final-ABI] +# @DESCRIPTION: +# Print path to Python library directory. +# If --base-path option is specified, then path not prefixed with "/" is printed. +# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used. +python_get_libdir() { + _python_check_python_pkg_setup_execution + + local base_path="0" final_ABI="0" prefix PYTHON_ABI="${PYTHON_ABI}" + + while (($#)); do + case "$1" in + -b|--base-path) + base_path="1" + ;; + -f|--final-ABI) + final_ABI="1" + ;; + -*) + die "${FUNCNAME}(): Unrecognized option '$1'" + ;; + *) + die "${FUNCNAME}(): Invalid usage" + ;; + esac + shift + done + + if [[ "${base_path}" == "0" ]]; then + prefix="/" + fi + + if [[ "${final_ABI}" == "1" ]]; then + if ! _python_package_supporting_installation_for_multiple_python_abis; then + die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs" + fi + PYTHON_ABI="$(PYTHON -f --ABI)" + else + if _python_package_supporting_installation_for_multiple_python_abis; then + if ! 
_python_abi-specific_local_scope; then + die "${FUNCNAME}() should be used in ABI-specific local scope" + fi + else + PYTHON_ABI="${PYTHON_ABI:-$(PYTHON --ABI)}" + fi + fi + + if [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "CPython" ]]; then + echo "${prefix}usr/$(get_libdir)/python${PYTHON_ABI}" + elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "Jython" ]]; then + echo "${prefix}usr/share/jython-${PYTHON_ABI%-jython}/Lib" + fi +} + +# @FUNCTION: python_get_sitedir +# @USAGE: [-b|--base-path] [-f|--final-ABI] +# @DESCRIPTION: +# Print path to Python site-packages directory. +# If --base-path option is specified, then path not prefixed with "/" is printed. +# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used. +python_get_sitedir() { + _python_check_python_pkg_setup_execution + + local final_ABI="0" options=() + + while (($#)); do + case "$1" in + -b|--base-path) + options+=("$1") + ;; + -f|--final-ABI) + final_ABI="1" + options+=("$1") + ;; + -*) + die "${FUNCNAME}(): Unrecognized option '$1'" + ;; + *) + die "${FUNCNAME}(): Invalid usage" + ;; + esac + shift + done + + if [[ "${final_ABI}" == "1" ]]; then + if ! _python_package_supporting_installation_for_multiple_python_abis; then + die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs" + fi + else + if _python_package_supporting_installation_for_multiple_python_abis && ! _python_abi-specific_local_scope; then + die "${FUNCNAME}() should be used in ABI-specific local scope" + fi + fi + + echo "$(python_get_libdir "${options[@]}")/site-packages" +} + +# @FUNCTION: python_get_library +# @USAGE: [-b|--base-path] [-f|--final-ABI] [-l|--linker-option] +# @DESCRIPTION: +# Print path to Python library. +# If --base-path option is specified, then path not prefixed with "/" is printed. +# If --linker-option is specified, then "-l${library}" linker option is printed. 
+# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used. +python_get_library() { + _python_check_python_pkg_setup_execution + + local base_path="0" final_ABI="0" linker_option="0" prefix PYTHON_ABI="${PYTHON_ABI}" + + while (($#)); do + case "$1" in + -b|--base-path) + base_path="1" + ;; + -f|--final-ABI) + final_ABI="1" + ;; + -l|--linker-option) + linker_option="1" + ;; + -*) + die "${FUNCNAME}(): Unrecognized option '$1'" + ;; + *) + die "${FUNCNAME}(): Invalid usage" + ;; + esac + shift + done + + if [[ "${base_path}" == "0" ]]; then + prefix="/" + fi + + if [[ "${base_path}" == "1" && "${linker_option}" == "1" ]]; then + die "${FUNCNAME}(): '--base-path' and '--linker-option' options cannot be specified simultaneously" + fi + + if [[ "${final_ABI}" == "1" ]]; then + if ! _python_package_supporting_installation_for_multiple_python_abis; then + die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs" + fi + PYTHON_ABI="$(PYTHON -f --ABI)" + else + if _python_package_supporting_installation_for_multiple_python_abis; then + if ! _python_abi-specific_local_scope; then + die "${FUNCNAME}() should be used in ABI-specific local scope" + fi + else + PYTHON_ABI="${PYTHON_ABI:-$(PYTHON --ABI)}" + fi + fi + + if [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "CPython" ]]; then + if [[ "${linker_option}" == "1" ]]; then + echo "-lpython${PYTHON_ABI}" + else + echo "${prefix}usr/$(get_libdir)/libpython${PYTHON_ABI}$(get_libname)" + fi + elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "Jython" ]]; then + die "${FUNCNAME}(): Jython does not have shared library" + fi +} + +# @FUNCTION: python_get_version +# @USAGE: [-f|--final-ABI] [--full] [--major] [--minor] [--micro] +# @DESCRIPTION: +# Print Python version. +# --full, --major, --minor and --micro options cannot be specified simultaneously. 
+# If --full, --major, --minor and --micro options are not specified, then "${major_version}.${minor_version}" is printed. +# If --final-ABI option is specified, then final ABI from the list of enabled ABIs is used. +python_get_version() { + _python_check_python_pkg_setup_execution + + local final_ABI="0" full="0" major="0" minor="0" micro="0" python_command + + while (($#)); do + case "$1" in + -f|--final-ABI) + final_ABI="1" + ;; + --full) + full="1" + ;; + --major) + major="1" + ;; + --minor) + minor="1" + ;; + --micro) + micro="1" + ;; + -*) + die "${FUNCNAME}(): Unrecognized option '$1'" + ;; + *) + die "${FUNCNAME}(): Invalid usage" + ;; + esac + shift + done + + if [[ "$((${full} + ${major} + ${minor} + ${micro}))" -gt 1 ]]; then + die "${FUNCNAME}(): '--full', '--major', '--minor' or '--micro' options cannot be specified simultaneously" + fi + + if [[ "${full}" == "1" ]]; then + python_command="from sys import version_info; print('.'.join(str(x) for x in version_info[:3]))" + elif [[ "${major}" == "1" ]]; then + python_command="from sys import version_info; print(version_info[0])" + elif [[ "${minor}" == "1" ]]; then + python_command="from sys import version_info; print(version_info[1])" + elif [[ "${micro}" == "1" ]]; then + python_command="from sys import version_info; print(version_info[2])" + else + if [[ -n "${PYTHON_ABI}" && "${final_ABI}" == "0" ]]; then + if [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "CPython" ]]; then + echo "${PYTHON_ABI}" + elif [[ "$(_python_get_implementation "${PYTHON_ABI}")" == "Jython" ]]; then + echo "${PYTHON_ABI%-jython}" + fi + return + fi + python_command="from sys import version_info; print('.'.join(str(x) for x in version_info[:2]))" + fi + + if [[ "${final_ABI}" == "1" ]]; then + if ! 
_python_package_supporting_installation_for_multiple_python_abis; then + die "${FUNCNAME}(): '--final-ABI' option cannot be used in ebuilds of packages not supporting installation for multiple Python ABIs" + fi + "$(PYTHON -f)" -c "${python_command}" + else + if _python_package_supporting_installation_for_multiple_python_abis && ! _python_abi-specific_local_scope; then + die "${FUNCNAME}() should be used in ABI-specific local scope" + fi + "$(PYTHON ${PYTHON_ABI})" -c "${python_command}" + fi +} + +# ================================================================================================ +# ================================ FUNCTIONS FOR RUNNING OF TESTS ================================ +# ================================================================================================ + +# @ECLASS-VARIABLE: PYTHON_TEST_VERBOSITY +# @DESCRIPTION: +# User-configurable verbosity of tests of Python modules. +# Supported values: 0, 1, 2, 3, 4. +PYTHON_TEST_VERBOSITY="${PYTHON_TEST_VERBOSITY:-1}" + +_python_test_hook() { + if [[ "$#" -ne 1 ]]; then + die "${FUNCNAME}() requires 1 argument" + fi + + if _python_package_supporting_installation_for_multiple_python_abis && [[ "$(type -t "${FUNCNAME[3]}_$1_hook")" == "function" ]]; then + "${FUNCNAME[3]}_$1_hook" + fi +} + +# @FUNCTION: python_execute_nosetests +# @USAGE: [-P|--PYTHONPATH PYTHONPATH] [-s|--separate-build-dirs] [--] [arguments] +# @DESCRIPTION: +# Execute nosetests for all enabled Python ABIs. +# In ebuilds of packages supporting installation for multiple Python ABIs, this function calls +# python_execute_nosetests_pre_hook() and python_execute_nosetests_post_hook(), if they are defined. 
+python_execute_nosetests() { + _python_check_python_pkg_setup_execution + _python_set_color_variables + + local PYTHONPATH_template= separate_build_dirs= + + while (($#)); do + case "$1" in + -P|--PYTHONPATH) + PYTHONPATH_template="$2" + shift + ;; + -s|--separate-build-dirs) + separate_build_dirs="1" + ;; + --) + shift + break + ;; + -*) + die "${FUNCNAME}(): Unrecognized option '$1'" + ;; + *) + break + ;; + esac + shift + done + + python_test_function() { + local evaluated_PYTHONPATH + + evaluated_PYTHONPATH="$(eval echo -n "${PYTHONPATH_template}")" + + _python_test_hook pre + + if [[ -n "${evaluated_PYTHONPATH}" ]]; then + echo ${_BOLD}PYTHONPATH="${evaluated_PYTHONPATH}" nosetests --verbosity="${PYTHON_TEST_VERBOSITY}" "$@"${_NORMAL} + PYTHONPATH="${evaluated_PYTHONPATH}" nosetests --verbosity="${PYTHON_TEST_VERBOSITY}" "$@" || return "$?" + else + echo ${_BOLD}nosetests --verbosity="${PYTHON_TEST_VERBOSITY}" "$@"${_NORMAL} + nosetests --verbosity="${PYTHON_TEST_VERBOSITY}" "$@" || return "$?" + fi + + _python_test_hook post + } + if _python_package_supporting_installation_for_multiple_python_abis; then + python_execute_function ${separate_build_dirs:+-s} python_test_function "$@" + else + if [[ -n "${separate_build_dirs}" ]]; then + die "${FUNCNAME}(): Invalid usage" + fi + python_test_function "$@" || die "Testing failed" + fi + + unset -f python_test_function +} + +# @FUNCTION: python_execute_py.test +# @USAGE: [-P|--PYTHONPATH PYTHONPATH] [-s|--separate-build-dirs] [--] [arguments] +# @DESCRIPTION: +# Execute py.test for all enabled Python ABIs. +# In ebuilds of packages supporting installation for multiple Python ABIs, this function calls +# python_execute_py.test_pre_hook() and python_execute_py.test_post_hook(), if they are defined. 
+python_execute_py.test() { + _python_check_python_pkg_setup_execution + _python_set_color_variables + + local PYTHONPATH_template= separate_build_dirs= + + while (($#)); do + case "$1" in + -P|--PYTHONPATH) + PYTHONPATH_template="$2" + shift + ;; + -s|--separate-build-dirs) + separate_build_dirs="1" + ;; + --) + shift + break + ;; + -*) + die "${FUNCNAME}(): Unrecognized option '$1'" + ;; + *) + break + ;; + esac + shift + done + + python_test_function() { + local evaluated_PYTHONPATH + + evaluated_PYTHONPATH="$(eval echo -n "${PYTHONPATH_template}")" + + _python_test_hook pre + + if [[ -n "${evaluated_PYTHONPATH}" ]]; then + echo ${_BOLD}PYTHONPATH="${evaluated_PYTHONPATH}" py.test $([[ "${PYTHON_TEST_VERBOSITY}" -ge 2 ]] && echo -v) "$@"${_NORMAL} + PYTHONPATH="${evaluated_PYTHONPATH}" py.test $([[ "${PYTHON_TEST_VERBOSITY}" -ge 2 ]] && echo -v) "$@" || return "$?" + else + echo ${_BOLD}py.test $([[ "${PYTHON_TEST_VERBOSITY}" -gt 1 ]] && echo -v) "$@"${_NORMAL} + py.test $([[ "${PYTHON_TEST_VERBOSITY}" -gt 1 ]] && echo -v) "$@" || return "$?" + fi + + _python_test_hook post + } + if _python_package_supporting_installation_for_multiple_python_abis; then + python_execute_function ${separate_build_dirs:+-s} python_test_function "$@" + else + if [[ -n "${separate_build_dirs}" ]]; then + die "${FUNCNAME}(): Invalid usage" + fi + python_test_function "$@" || die "Testing failed" + fi + + unset -f python_test_function +} + +# @FUNCTION: python_execute_trial +# @USAGE: [-P|--PYTHONPATH PYTHONPATH] [-s|--separate-build-dirs] [--] [arguments] +# @DESCRIPTION: +# Execute trial for all enabled Python ABIs. +# In ebuilds of packages supporting installation for multiple Python ABIs, this function +# calls python_execute_trial_pre_hook() and python_execute_trial_post_hook(), if they are defined. 
+python_execute_trial() { + _python_check_python_pkg_setup_execution + _python_set_color_variables + + local PYTHONPATH_template= separate_build_dirs= + + while (($#)); do + case "$1" in + -P|--PYTHONPATH) + PYTHONPATH_template="$2" + shift + ;; + -s|--separate-build-dirs) + separate_build_dirs="1" + ;; + --) + shift + break + ;; + -*) + die "${FUNCNAME}(): Unrecognized option '$1'" + ;; + *) + break + ;; + esac + shift + done + + python_test_function() { + local evaluated_PYTHONPATH + + evaluated_PYTHONPATH="$(eval echo -n "${PYTHONPATH_template}")" + + _python_test_hook pre + + if [[ -n "${evaluated_PYTHONPATH}" ]]; then + echo ${_BOLD}PYTHONPATH="${evaluated_PYTHONPATH}" trial $([[ "${PYTHON_TEST_VERBOSITY}" -ge 4 ]] && echo --spew) "$@"${_NORMAL} + PYTHONPATH="${evaluated_PYTHONPATH}" trial $([[ "${PYTHON_TEST_VERBOSITY}" -ge 4 ]] && echo --spew) "$@" || return "$?" + else + echo ${_BOLD}trial $([[ "${PYTHON_TEST_VERBOSITY}" -ge 4 ]] && echo --spew) "$@"${_NORMAL} + trial $([[ "${PYTHON_TEST_VERBOSITY}" -ge 4 ]] && echo --spew) "$@" || return "$?" + fi + + _python_test_hook post + } + if _python_package_supporting_installation_for_multiple_python_abis; then + python_execute_function ${separate_build_dirs:+-s} python_test_function "$@" + else + if [[ -n "${separate_build_dirs}" ]]; then + die "${FUNCNAME}(): Invalid usage" + fi + python_test_function "$@" || die "Testing failed" + fi + + unset -f python_test_function +} + +# ================================================================================================ +# ======================= FUNCTIONS FOR HANDLING OF BYTE-COMPILED MODULES ======================== +# ================================================================================================ + +# @FUNCTION: python_enable_pyc +# @DESCRIPTION: +# Tell Python to automatically recompile modules to .pyc/.pyo if the +# timestamps/version stamps have changed. 
+python_enable_pyc() { + _python_check_python_pkg_setup_execution + + if [[ "$#" -ne 0 ]]; then + die "${FUNCNAME}() does not accept arguments" + fi + + unset PYTHONDONTWRITEBYTECODE +} + +# @FUNCTION: python_disable_pyc +# @DESCRIPTION: +# Tell Python not to automatically recompile modules to .pyc/.pyo +# even if the timestamps/version stamps do not match. This is done +# to protect sandbox. +python_disable_pyc() { + _python_check_python_pkg_setup_execution + + if [[ "$#" -ne 0 ]]; then + die "${FUNCNAME}() does not accept arguments" + fi + + export PYTHONDONTWRITEBYTECODE="1" +} + +_python_clean_compiled_modules() { + _python_initialize_prefix_variables + _python_set_color_variables + + [[ "${FUNCNAME[1]}" =~ ^(python_mod_optimize|python_mod_compile|python_mod_cleanup)$ ]] || die "${FUNCNAME}(): Invalid usage" + + local base_module_name compiled_file compiled_files=() dir path py_file root + + # Strip trailing slash from EROOT. + root="${EROOT%/}" + + for path in "$@"; do + compiled_files=() + if [[ -d "${path}" ]]; then + while read -d $'\0' -r compiled_file; do + compiled_files+=("${compiled_file}") + done < <(find "${path}" "(" -name "*.py[co]" -o -name "*\$py.class" ")" -print0) + + if [[ "${EBUILD_PHASE}" == "postrm" ]]; then + # Delete empty child directories. + find "${path}" -type d | sort -r | while read -r dir; do + if rmdir "${dir}" 2> /dev/null; then + echo "${_CYAN}<<< ${dir}${_NORMAL}" + fi + done + fi + elif [[ "${path}" == *.py ]]; then + base_module_name="${path##*/}" + base_module_name="${base_module_name%.py}" + if [[ -d "${path%/*}/__pycache__" ]]; then + while read -d $'\0' -r compiled_file; do + compiled_files+=("${compiled_file}") + done < <(find "${path%/*}/__pycache__" "(" -name "${base_module_name}.*.py[co]" -o -name "${base_module_name}\$py.class" ")" -print0) + fi + compiled_files+=("${path}c" "${path}o" "${path%.py}\$py.class") + fi + + for compiled_file in "${compiled_files[@]}"; do + [[ ! 
-f "${compiled_file}" ]] && continue + dir="${compiled_file%/*}" + dir="${dir##*/}" + if [[ "${compiled_file}" == *.py[co] ]]; then + if [[ "${dir}" == "__pycache__" ]]; then + base_module_name="${compiled_file##*/}" + base_module_name="${base_module_name%.*py[co]}" + base_module_name="${base_module_name%.*}" + py_file="${compiled_file%__pycache__/*}${base_module_name}.py" + else + py_file="${compiled_file%[co]}" + fi + if [[ "${EBUILD_PHASE}" == "postinst" ]]; then + [[ -f "${py_file}" && "${compiled_file}" -nt "${py_file}" ]] && continue + else + [[ -f "${py_file}" ]] && continue + fi + echo "${_BLUE}<<< ${compiled_file%[co]}[co]${_NORMAL}" + rm -f "${compiled_file%[co]}"[co] + elif [[ "${compiled_file}" == *\$py.class ]]; then + if [[ "${dir}" == "__pycache__" ]]; then + base_module_name="${compiled_file##*/}" + base_module_name="${base_module_name%\$py.class}" + py_file="${compiled_file%__pycache__/*}${base_module_name}.py" + else + py_file="${compiled_file%\$py.class}" + fi + if [[ "${EBUILD_PHASE}" == "postinst" ]]; then + [[ -f "${py_file}" && "${compiled_file}" -nt "${py_file}" ]] && continue + else + [[ -f "${py_file}" ]] && continue + fi + echo "${_BLUE}<<< ${compiled_file}${_NORMAL}" + rm -f "${compiled_file}" + else + die "${FUNCNAME}(): Unrecognized file type: '${compiled_file}'" + fi + + # Delete empty parent directories. + dir="${compiled_file%/*}" + while [[ "${dir}" != "${root}" ]]; do + if rmdir "${dir}" 2> /dev/null; then + echo "${_CYAN}<<< ${dir}${_NORMAL}" + else + break + fi + dir="${dir%/*}" + done + done + done +} + +# @FUNCTION: python_mod_optimize +# @USAGE: [options] [directory|file] +# @DESCRIPTION: +# If no arguments supplied, it will recompile not recursively all modules +# under sys.path (eg. /usr/lib/python2.6, /usr/lib/python2.6/site-packages). +# +# If supplied with arguments, it will recompile all modules recursively +# in the supplied directory. +# +# Options passed to this function are passed to compileall.py. 
+# +# This function can be used only in pkg_postinst() phase. +python_mod_optimize() { + _python_check_python_pkg_setup_execution + _python_initialize_prefix_variables + + # Check if phase is pkg_postinst(). + [[ "${EBUILD_PHASE}" != "postinst" ]] && die "${FUNCNAME}() can be used only in pkg_postinst() phase" + + if ! has "${EAPI:-0}" 0 1 2 || _python_package_supporting_installation_for_multiple_python_abis; then + # PYTHON_ABI variable cannot be local in packages not supporting installation for multiple Python ABIs. + local dir file iterated_PYTHON_ABIS options=() other_dirs=() other_files=() previous_PYTHON_ABI="${PYTHON_ABI}" return_code root site_packages_absolute_dirs=() site_packages_dirs=() site_packages_absolute_files=() site_packages_files=() + + if _python_package_supporting_installation_for_multiple_python_abis; then + if has "${EAPI:-0}" 0 1 2 3 && [[ -z "${PYTHON_ABIS}" ]]; then + die "${FUNCNAME}(): python_pkg_setup() or python_execute_function() not called" + fi + iterated_PYTHON_ABIS="${PYTHON_ABIS}" + else + if has "${EAPI:-0}" 0 1 2 3; then + iterated_PYTHON_ABIS="${PYTHON_ABI:=$(PYTHON --ABI)}" + else + iterated_PYTHON_ABIS="${PYTHON_ABI}" + fi + fi + + # Strip trailing slash from EROOT. + root="${EROOT%/}" + + while (($#)); do + case "$1" in + -l|-f|-q) + options+=("$1") + ;; + -d|-x) + options+=("$1" "$2") + shift + ;; + --) + shift + break + ;; + -*) + die "${FUNCNAME}(): Unrecognized option '$1'" + ;; + *) + break + ;; + esac + shift + done + + if [[ "$#" -eq 0 ]]; then + ewarn + ewarn "Deprecation Warning: Not passing of paths to ${FUNCNAME}() is deprecated and will be" + ewarn "disallowed on 2010-09-01. Call ${FUNCNAME}() with paths to Python modules." + ewarn "The ebuild needs to be fixed. Please report a bug, if it has not been already reported." + ewarn + fi + + while (($#)); do + if [[ "$1" =~ ^($|(\.|\.\.|/)($|/)) ]]; then + die "${FUNCNAME}(): Invalid argument '$1'" + elif ! 
_python_implementation && [[ "$1" =~ ^/usr/lib(32|64)?/python[[:digit:]]+\.[[:digit:]]+ ]]; then + die "${FUNCNAME}(): Paths of directories / files in site-packages directories must be relative to site-packages directories" + elif [[ "$1" =~ ^/ ]]; then + if _python_package_supporting_installation_for_multiple_python_abis; then + die "${FUNCNAME}(): Absolute paths cannot be used in ebuilds of packages supporting installation for multiple Python ABIs" + fi + if [[ -d "${root}$1" ]]; then + other_dirs+=("${root}$1") + elif [[ -f "${root}$1" ]]; then + other_files+=("${root}$1") + elif [[ -e "${root}$1" ]]; then + eerror "${FUNCNAME}(): '${root}$1' is not a regular file or a directory" + else + eerror "${FUNCNAME}(): '${root}$1' does not exist" + fi + else + for PYTHON_ABI in ${iterated_PYTHON_ABIS}; do + if [[ -d "${root}$(python_get_sitedir)/$1" ]]; then + site_packages_dirs+=("$1") + break + elif [[ -f "${root}$(python_get_sitedir)/$1" ]]; then + site_packages_files+=("$1") + break + elif [[ -e "${root}$(python_get_sitedir)/$1" ]]; then + eerror "${FUNCNAME}(): '$1' is not a regular file or a directory" + else + eerror "${FUNCNAME}(): '$1' does not exist" + fi + done + fi + shift + done + + # Set additional options. 
+ options+=("-q") + + for PYTHON_ABI in ${iterated_PYTHON_ABIS}; do + if ((${#site_packages_dirs[@]})) || ((${#site_packages_files[@]})); then + return_code="0" + ebegin "Compilation and optimization of Python modules for $(python_get_implementation) $(python_get_version)" + if ((${#site_packages_dirs[@]})); then + for dir in "${site_packages_dirs[@]}"; do + site_packages_absolute_dirs+=("${root}$(python_get_sitedir)/${dir}") + done + "$(PYTHON)" "${root}$(python_get_libdir)/compileall.py" "${options[@]}" "${site_packages_absolute_dirs[@]}" || return_code="1" + if [[ "$(_python_get_implementation "${PYTHON_ABI}")" != "Jython" ]]; then + "$(PYTHON)" -O "${root}$(python_get_libdir)/compileall.py" "${options[@]}" "${site_packages_absolute_dirs[@]}" &> /dev/null || return_code="1" + fi + _python_clean_compiled_modules "${site_packages_absolute_dirs[@]}" + fi + if ((${#site_packages_files[@]})); then + for file in "${site_packages_files[@]}"; do + site_packages_absolute_files+=("${root}$(python_get_sitedir)/${file}") + done + "$(PYTHON)" "${root}$(python_get_libdir)/py_compile.py" "${site_packages_absolute_files[@]}" || return_code="1" + if [[ "$(_python_get_implementation "${PYTHON_ABI}")" != "Jython" ]]; then + "$(PYTHON)" -O "${root}$(python_get_libdir)/py_compile.py" "${site_packages_absolute_files[@]}" &> /dev/null || return_code="1" + fi + _python_clean_compiled_modules "${site_packages_absolute_files[@]}" + fi + eend "${return_code}" + fi + unset site_packages_absolute_dirs site_packages_absolute_files + done + + if _python_package_supporting_installation_for_multiple_python_abis; then + # Restore previous value of PYTHON_ABI. 
+ if [[ -n "${previous_PYTHON_ABI}" ]]; then + PYTHON_ABI="${previous_PYTHON_ABI}" + else + unset PYTHON_ABI + fi + fi + + if ((${#other_dirs[@]})) || ((${#other_files[@]})); then + return_code="0" + ebegin "Compilation and optimization of Python modules placed outside of site-packages directories for $(python_get_implementation) $(python_get_version)" + if ((${#other_dirs[@]})); then + "$(PYTHON ${PYTHON_ABI})" "${root}$(python_get_libdir)/compileall.py" "${options[@]}" "${other_dirs[@]}" || return_code="1" + if [[ "$(_python_get_implementation "${PYTHON_ABI}")" != "Jython" ]]; then + "$(PYTHON ${PYTHON_ABI})" -O "${root}$(python_get_libdir)/compileall.py" "${options[@]}" "${other_dirs[@]}" &> /dev/null || return_code="1" + fi + _python_clean_compiled_modules "${other_dirs[@]}" + fi + if ((${#other_files[@]})); then + "$(PYTHON ${PYTHON_ABI})" "${root}$(python_get_libdir)/py_compile.py" "${other_files[@]}" || return_code="1" + if [[ "$(_python_get_implementation "${PYTHON_ABI}")" != "Jython" ]]; then + "$(PYTHON ${PYTHON_ABI})" -O "${root}$(python_get_libdir)/py_compile.py" "${other_files[@]}" &> /dev/null || return_code="1" + fi + _python_clean_compiled_modules "${other_dirs[@]}" + fi + eend "${return_code}" + fi + else + # Deprecated part of python_mod_optimize() + + local myroot mydirs=() myfiles=() myopts=() return_code="0" + + # strip trailing slash + myroot="${EROOT%/}" + + # respect EROOT and options passed to compileall.py + while (($#)); do + case "$1" in + -l|-f|-q) + myopts+=("$1") + ;; + -d|-x) + myopts+=("$1" "$2") + shift + ;; + --) + shift + break + ;; + -*) + die "${FUNCNAME}(): Unrecognized option '$1'" + ;; + *) + break + ;; + esac + shift + done + + if [[ "$#" -eq 0 ]]; then + ewarn + ewarn "Deprecation Warning: Not passing of paths to ${FUNCNAME}() is deprecated and will be" + ewarn "disallowed on 2010-09-01. Call ${FUNCNAME}() with paths to Python modules." + ewarn "The ebuild needs to be fixed. 
Please report a bug, if it has not been already reported." + ewarn + fi + + while (($#)); do + if [[ "$1" =~ ^($|(\.|\.\.|/)($|/)) ]]; then + die "${FUNCNAME}(): Invalid argument '$1'" + elif [[ -d "${myroot}/${1#/}" ]]; then + mydirs+=("${myroot}/${1#/}") + elif [[ -f "${myroot}/${1#/}" ]]; then + # Files are passed to python_mod_compile which is EROOT-aware + myfiles+=("$1") + elif [[ -e "${myroot}/${1#/}" ]]; then + eerror "${FUNCNAME}(): ${myroot}/${1#/} is not a regular file or directory" + else + eerror "${FUNCNAME}(): ${myroot}/${1#/} does not exist" + fi + shift + done + + # set additional opts + myopts+=(-q) + + PYTHON_ABI="${PYTHON_ABI:-$(PYTHON --ABI)}" + + ebegin "Compilation and optimization of Python modules for $(python_get_implementation) $(python_get_version)" + if ((${#mydirs[@]})); then + "$(PYTHON ${PYTHON_ABI})" "${myroot}$(python_get_libdir)/compileall.py" "${myopts[@]}" "${mydirs[@]}" || return_code="1" + "$(PYTHON ${PYTHON_ABI})" -O "${myroot}$(python_get_libdir)/compileall.py" "${myopts[@]}" "${mydirs[@]}" &> /dev/null || return_code="1" + _python_clean_compiled_modules "${mydirs[@]}" + fi + + if ((${#myfiles[@]})); then + python_mod_compile "${myfiles[@]}" + fi + + eend "${return_code}" + fi +} + +# @FUNCTION: python_mod_cleanup +# @USAGE: [directory|file] +# @DESCRIPTION: +# Run with optional arguments, where arguments are Python modules. If none given, +# it will look in /usr/lib/python[0-9].[0-9]. +# +# It will recursively scan all compiled Python modules in the directories and +# determine if they are orphaned (i.e. their corresponding .py files are missing.) +# If they are, then it will remove their corresponding .pyc and .pyo files. +# +# This function can be used only in pkg_postrm() phase. +python_mod_cleanup() { + _python_check_python_pkg_setup_execution + _python_initialize_prefix_variables + + local dir iterated_PYTHON_ABIS PYTHON_ABI="${PYTHON_ABI}" root search_paths=() sitedir + + # Check if phase is pkg_postrm(). 
+ [[ "${EBUILD_PHASE}" != "postrm" ]] && die "${FUNCNAME}() can be used only in pkg_postrm() phase" + + if _python_package_supporting_installation_for_multiple_python_abis; then + if has "${EAPI:-0}" 0 1 2 3 && [[ -z "${PYTHON_ABIS}" ]]; then + die "${FUNCNAME}(): python_pkg_setup() or python_execute_function() not called" + fi + iterated_PYTHON_ABIS="${PYTHON_ABIS}" + else + if has "${EAPI:-0}" 0 1 2 3; then + iterated_PYTHON_ABIS="${PYTHON_ABI:-$(PYTHON --ABI)}" + else + iterated_PYTHON_ABIS="${PYTHON_ABI}" + fi + fi + + # Strip trailing slash from EROOT. + root="${EROOT%/}" + + if [[ "$#" -gt 0 ]]; then + if ! has "${EAPI:-0}" 0 1 2 || _python_package_supporting_installation_for_multiple_python_abis; then + while (($#)); do + if [[ "$1" =~ ^($|(\.|\.\.|/)($|/)) ]]; then + die "${FUNCNAME}(): Invalid argument '$1'" + elif ! _python_implementation && [[ "$1" =~ ^/usr/lib(32|64)?/python[[:digit:]]+\.[[:digit:]]+ ]]; then + die "${FUNCNAME}(): Paths of directories / files in site-packages directories must be relative to site-packages directories" + elif [[ "$1" =~ ^/ ]]; then + if _python_package_supporting_installation_for_multiple_python_abis; then + die "${FUNCNAME}(): Absolute paths cannot be used in ebuilds of packages supporting installation for multiple Python ABIs" + fi + search_paths+=("${root}$1") + else + for PYTHON_ABI in ${iterated_PYTHON_ABIS}; do + search_paths+=("${root}$(python_get_sitedir)/$1") + done + fi + shift + done + else + # Deprecated part of python_mod_cleanup() + + search_paths=("${@#/}") + search_paths=("${search_paths[@]/#/${root}/}") + fi + else + ewarn + ewarn "Deprecation Warning: Not passing of paths to ${FUNCNAME}() is deprecated and will be" + ewarn "disallowed on 2010-09-01. Call ${FUNCNAME}() with paths to Python modules." + ewarn "The ebuild needs to be fixed. Please report a bug, if it has not been already reported." + ewarn + + for dir in "${root}"/usr/lib*; do + if [[ -d "${dir}" && ! 
-L "${dir}" ]]; then + for sitedir in "${dir}"/python*/site-packages; do + if [[ -d "${sitedir}" ]]; then + search_paths+=("${sitedir}") + fi + done + fi + done + for sitedir in "${root}"/usr/share/jython-*/Lib/site-packages; do + if [[ -d "${sitedir}" ]]; then + search_paths+=("${sitedir}") + fi + done + fi + + _python_clean_compiled_modules "${search_paths[@]}" +} + +# ================================================================================================ +# ===================================== DEPRECATED FUNCTIONS ===================================== +# ================================================================================================ + +# Scheduled for deletion on 2011-01-01. +python_version() { + eerror "Use PYTHON() instead of python variable. Use python_get_*() instead of PYVER* variables." + die "${FUNCNAME}() is banned" +} + +# Scheduled for deletion on 2011-01-01. +python_mod_exists() { + eerror "Use USE dependencies and/or has_version() instead of ${FUNCNAME}()." + die "${FUNCNAME}() is banned" +} + +# Scheduled for deletion on 2011-01-01. +python_tkinter_exists() { + eerror "Use PYTHON_USE_WITH=\"xml\" and python_pkg_setup() instead of ${FUNCNAME}()." + die "${FUNCNAME}() is banned" +} + +# @FUNCTION: python_mod_compile +# @USAGE: [more files ...] +# @DESCRIPTION: +# Given filenames, it will pre-compile the module's .pyc and .pyo. +# This function can be used only in pkg_postinst() phase. +# +# Example: +# python_mod_compile /usr/lib/python2.3/site-packages/pygoogle.py +# +python_mod_compile() { + if ! has "${EAPI:-0}" 0 1 2 || _python_package_supporting_installation_for_multiple_python_abis; then + eerror "Use python_mod_optimize() instead of ${FUNCNAME}()." 
+ die "${FUNCNAME}() cannot be used in this EAPI" + fi + + _python_initialize_prefix_variables + _python_set_color_variables + + if [[ "${FUNCNAME[1]}" != "python_mod_optimize" ]]; then + ewarn + ewarn "Deprecation Warning: ${FUNCNAME}() is deprecated and will be banned on 2010-09-01." + ewarn "Use python_mod_optimize() instead of ${FUNCNAME}()." + ewarn "The ebuild needs to be fixed. Please report a bug, if it has not been already reported." + ewarn + fi + + local f myroot myfiles=() + + # Check if phase is pkg_postinst() + [[ "${EBUILD_PHASE}" != "postinst" ]] && die "${FUNCNAME}() can be used only in pkg_postinst() phase" + + # strip trailing slash + myroot="${EROOT%/}" + + # respect EROOT + for f in "$@"; do + [[ -f "${myroot}/${f}" ]] && myfiles+=("${myroot}/${f}") + done + + PYTHON_ABI="$(PYTHON --ABI)" + + if ((${#myfiles[@]})); then + "$(PYTHON ${PYTHON_ABI})" "${myroot}$(python_get_libdir)/py_compile.py" "${myfiles[@]}" + "$(PYTHON ${PYTHON_ABI})" -O "${myroot}$(python_get_libdir)/py_compile.py" "${myfiles[@]}" &> /dev/null + _python_clean_compiled_modules "${myfiles[@]}" + else + ewarn "No files to compile!" + fi +} diff --git a/eclass/qt4-build.eclass b/eclass/qt4-build.eclass new file mode 100644 index 0000000..9346262 --- /dev/null +++ b/eclass/qt4-build.eclass @@ -0,0 +1,802 @@ +# Copyright 1999-2009 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 +# $Header: /var/cvsroot/gentoo-x86/eclass/qt4-build.eclass,v 1.78 2010/07/11 10:32:17 hwoarang Exp $ + +export EMULTILIB_SAVE_VARS="QTBASEDIR QTPREFIXDIR QTBINDIR QTLIBDIR \ + QMAKE_LIBDIR_QT QTPCDIR QTDATADIR QTDOCDIR QTHEADERDIR \ + QTPLUGINDIR QTSYSCONFDIR QTTRANSDIR QTEXAMPLESDIR \ + QTDEMOSDIR QT_INSTALL_PREFIX PLATFORM QMAKE_CFLAGS \ + QMAKE_CXXFLAGS QMAKE_LDFLAGS QT4_EXTRACT_DIRECTORIES" + +# @ECLASS: qt4-build.eclass +# @MAINTAINER: +# Ben de Groot , +# Markos Chandras , +# Caleb Tennis +# Alex Alexander +# @BLURB: Eclass for Qt4 split ebuilds. 
+# @DESCRIPTION: +# This eclass contains various functions that are used when building Qt4 + +inherit base eutils multilib toolchain-funcs flag-o-matic versionator + +MY_PV=${PV/_/-} +if version_is_at_least 4.5.99999999; then + MY_P=qt-everywhere-opensource-src-${MY_PV} + [[ ${CATEGORY}/${PN} != x11-libs/qt-xmlpatterns ]] && IUSE="+exceptions" +else + MY_P=qt-x11-opensource-src-${MY_PV} +fi + +HOMEPAGE="http://qt.nokia.com/" +SRC_URI="http://get.qt.nokia.com/qt/source/${MY_P}.tar.gz" + +LICENSE="|| ( LGPL-2.1 GPL-3 )" +IUSE+=" debug pch aqua" + +RDEPEND=" + !x11-libs/qt-assistant-${PV}-r9999 + !x11-libs/qt-core-${PV}-r9999 + !x11-libs/qt-dbus-${PV}-r9999 + !x11-libs/qt-demo-${PV}-r9999 + !x11-libs/qt-gui-${PV}-r9999 + !x11-libs/qt-multimedia-${PV}-r9999 + !x11-libs/qt-opengl-${PV}-r9999 + !x11-libs/qt-phonon-${PV}-r9999 + !x11-libs/qt-qt3support-${PV}-r9999 + !x11-libs/qt-script-${PV}-r9999 + !x11-libs/qt-sql-${PV}-r9999 + !x11-libs/qt-svg-${PV}-r9999 + !x11-libs/qt-test-${PV}-r9999 + !x11-libs/qt-webkit-${PV}-r9999 + !x11-libs/qt-xmlpatterns-${PV}-r9999 +" + +S=${WORKDIR}/${MY_P} + +# @FUNCTION: qt4-build_pkg_setup +# @DESCRIPTION: +# Sets up S, MY_P, PATH, and LD_LIBRARY_PATH +qt4-build_pkg_setup() { + [[ ${EAPI} == 2 ]] && use !prefix && EPREFIX= + + # Protect users by not allowing downgrades between releases + # Downgrading revisions within the same release should be allowed + if has_version '>'${CATEGORY}/${P}-r9999 ; then + if [[ -z $I_KNOW_WHAT_I_AM_DOING ]] ; then + eerror "Sanity check to keep you from breaking your system:" + eerror " Downgrading Qt is completely unsupported and will break your system!" + die "aborting to save your system" + else + ewarn "Downgrading Qt is completely unsupported and will break your system!" + fi + fi + + if [[ "${PN}" == "qt-webkit" ]]; then + eshopts_push -s extglob + if is-flagq '-g?(gdb)?([0-9])'; then + echo + ewarn "You have enabled debug info (probably have -g or -ggdb in your \$C{,XX}FLAGS)." 
+ ewarn "You may experience really long compilation times and/or increased memory usage." + ewarn "If compilation fails, please try removing -g{,gdb} before reporting a bug." + ewarn "For more info check out bug #307861" + echo + fi + eshopts_pop + fi + + PATH="${S}/bin${PATH:+:}${PATH}" + if [[ ${CHOST} != *-darwin* ]]; then + LD_LIBRARY_PATH="${S}/lib${LD_LIBRARY_PATH:+:}${LD_LIBRARY_PATH}" + else + DYLD_LIBRARY_PATH="${S}/lib${DYLD_LIBRARY_PATH:+:}${DYLD_LIBRARY_PATH}" + # On MacOS we *need* at least src/gui/kernel/qapplication_mac.mm for + # platform detection. Note: needs to come before any directories to + # avoid extract failure. + [[ ${CHOST} == *-apple-darwin* ]] && \ + QT4_EXTRACT_DIRECTORIES="src/gui/kernel/qapplication_mac.mm + ${QT4_EXTRACT_DIRECTORIES}" + fi + + # Make sure ebuilds use the required EAPI + if [[ ${EAPI} != [23] ]]; then + eerror "The qt4-build eclass requires EAPI=2 or EAPI=3, but this ebuild is using" + eerror "EAPI=${EAPI:-0}. The ebuild author or editor failed. This ebuild needs to be" + eerror "fixed. Using qt4-build eclass without EAPI=2 or EAPI=3 will fail." + die "qt4-build eclass requires EAPI=2 or EAPI=3" + fi + + if ! version_is_at_least 4.1 $(gcc-version); then + ewarn "Using a GCC version lower than 4.1 is not supported!" + fi +} + +# @ECLASS-VARIABLE: QT4_TARGET_DIRECTORIES +# @DESCRIPTION: +# Arguments for build_target_directories. Takes the directories, in which the +# code should be compiled. 
This is a space-separated list + +# @ECLASS-VARIABLE: QT4_EXTRACT_DIRECTORIES +# @DESCRIPTION: +# Space separated list including the directories that will be extracted from Qt +# tarball + +# @FUNCTION: qt4-build_src_unpack +# @DESCRIPTION: +# Unpacks the sources +qt4-build_src_unpack() { + setqtenv + local target targets= + for target in configure LICENSE.GPL3 LICENSE.LGPL projects.pro \ + src/{qbase,qt_targets,qt_install}.pri bin config.tests mkspecs qmake \ + ${QT4_EXTRACT_DIRECTORIES}; do + targets+=" ${MY_P}/${target}" + done + + echo tar xzf "${DISTDIR}"/${MY_P}.tar.gz ${targets} + tar xzf "${DISTDIR}"/${MY_P}.tar.gz ${targets} || die +} + +# @ECLASS-VARIABLE: PATCHES +# @DESCRIPTION: +# In case you have patches to apply, specify them in PATCHES variable. Make sure +# to specify the full path. This variable is necessary for src_prepare phase. +# example: +# PATCHES="${FILESDIR}"/mypatch.patch +# ${FILESDIR}"/mypatch2.patch" +# + +# @FUNCTION: qt4-build_src_prepare +# @DESCRIPTION: +# Prepare the sources before the configure phase. Strip CFLAGS if necessary, and fix +# source files in order to respect CFLAGS/CXXFLAGS/LDFLAGS specified on /etc/make.conf. 
+qt4-build_src_prepare() { + setqtenv + cd "${S}" + + # fix qt 4.7 regression that skips -fvisibility=hidden + if version_is_at_least "4.7.0_beta1"; then + sed -e "s/^gcc|g++)/*gcc|*g++)/" \ + -i config.tests/unix/fvisibility.test || + die "visibility fixing sed failed" + fi + # fix libx11 dependency on non X packages + if version_is_at_least "4.7.0_beta2"; then + local NOLIBX11PKG="qt-core qt-dbus qt-script qt-sql qt-test qt-xmlpatterns" + hasq ${PN} ${NOLIBX11PKG} && qt_nolibx11 + [[ ${PN} == "qt-assistant" ]] && qt_assistant_cleanup + fi + + if use aqua; then + # provide a proper macx-g++-64 + use x64-macos && ln -s macx-g++ mkspecs/$(qt_mkspecs_dir) + + sed -e '/^CONFIG/s:app_bundle::' \ + -e '/^CONFIG/s:plugin_no_soname:plugin_with_soname absolute_library_soname:' \ + -i mkspecs/$(qt_mkspecs_dir)/qmake.conf || die "sed failed" + fi + + if [[ ${PN} != qt-core ]]; then + skip_qmake_build_patch + skip_project_generation_patch + symlink_binaries_to_buildtree + fi + + if [[ ${CHOST} == *86*-apple-darwin* ]] ; then + # qmake bus errors with -O2 but -O3 works + replace-flags -O2 -O3 + fi + + # Bug 178652 + if [[ $(gcc-major-version) == 3 ]] && use amd64; then + ewarn "Appending -fno-gcse to CFLAGS/CXXFLAGS" + append-flags -fno-gcse + fi + + # Unsupported old gcc versions - hardened needs this :( + if [[ $(gcc-major-version) -lt 4 ]] ; then + ewarn "Appending -fno-stack-protector to CXXFLAGS" + append-cxxflags -fno-stack-protector + # Bug 253127 + sed -e "/^QMAKE_CFLAGS\t/ s:$: -fno-stack-protector-all:" \ + -i "${S}"/mkspecs/common/g++.conf || die "sed ${S}/mkspecs/common/g++.conf failed" + fi + + # Bug 261632 + if use ppc64; then + ewarn "Appending -mminimal-toc to CFLAGS/CXXFLAGS" + append-flags -mminimal-toc + fi + + # Bug 282984 && Bug 295530 + sed -e "s:\(^SYSTEM_VARIABLES\):CC=$(tc-getCC)\nCXX=$(tc-getCXX)\nCFLAGS=\"${CFLAGS}\"\nCXXFLAGS=\"${CXXFLAGS}\"\nLDFLAGS=\"${LDFLAGS}\"\n\1:" \ + -i configure || die "sed qmake compilers failed" + # bug 321335 + if 
version_is_at_least 4.6; then + find ./config.tests/unix -name "*.test" -type f -exec grep -lZ \$MAKE '{}' \; | \ + xargs -0 \ + sed -e "s:\(\$MAKE\):\1 CC=$(tc-getCC) CXX=$(tc-getCXX) LD=$(tc-getCXX) LINK=$(tc-getCXX):g" \ + -i || die "sed test compilers failed" + fi + + # Bug 172219 + sed -e "s:X11R6/::" \ + -i "${S}"/mkspecs/$(qt_mkspecs_dir)/qmake.conf || die "sed ${S}/mkspecs/$(qt_mkspecs_dir)/qmake.conf failed" + + if [[ ${CHOST} == *-darwin* ]]; then + # Set FLAGS *and* remove -arch, since our gcc-apple is multilib + # crippled (by design) :/ + sed -e "s:QMAKE_CFLAGS_RELEASE.*=.*:QMAKE_CFLAGS_RELEASE=${CFLAGS}:" \ + -e "s:QMAKE_CXXFLAGS_RELEASE.*=.*:QMAKE_CXXFLAGS_RELEASE=${CXXFLAGS}:" \ + -e "s:QMAKE_LFLAGS_RELEASE.*=.*:QMAKE_LFLAGS_RELEASE=-headerpad_max_install_names ${LDFLAGS}:" \ + -e "s:-arch\s\w*::g" \ + -i mkspecs/common/mac-g++.conf || die "sed mkspecs/common/mac-g++.conf failed" + + # Fix configure's -arch settings that appear in qmake/Makefile and also + # fix arch handling (automagically duplicates our -arch arg and breaks + # pch). Additionally disable Xarch support. + sed \ + -e "s:-arch i386::" \ + -e "s:-arch ppc::" \ + -e "s:-arch x86_64::" \ + -e "s:-arch ppc64::" \ + -e "s:-arch \$i::" \ + -e "/if \[ ! -z \"\$NATIVE_64_ARCH\" \]; then/,/fi/ d" \ + -e "s:CFG_MAC_XARCH=yes:CFG_MAC_XARCH=no:g" \ + -e "s:-Xarch_x86_64::g" \ + -e "s:-Xarch_ppc64::g" \ + -i configure mkspecs/common/mac-g++.conf || die "sed configure failed" + + # On Snow Leopard don't fall back to 10.5 deployment target. 
+ if [[ ${CHOST} == *-apple-darwin10 ]] ; then + sed -e "s:QMakeVar set QMAKE_MACOSX_DEPLOYMENT_TARGET.*:QMakeVar set QMAKE_MACOSX_DEPLOYMENT_TARGET 10.6:g" \ + -e "s:-mmacosx-version-min=10.[0-9]:-mmacosx-version-min=10.6:g" \ + -i configure mkspecs/common/mac-g++.conf || die "sed configure failed" + fi + fi + + # this one is needed for all systems with a separate -liconv, apart from + # Darwin, for which the sources already cater for -liconv + if use !elibc_glibc && [[ ${CHOST} != *-darwin* ]] ; then + sed \ + -e "s|mac:LIBS += -liconv|LIBS += -liconv|g" \ + -i config.tests/unix/iconv/iconv.pro \ + || die "sed on iconv.pro failed" + fi + + # we need some patches for Solaris + sed -i \ + -e '/^QMAKE_LFLAGS_THREAD/a\QMAKE_LFLAGS_DYNAMIC_LIST = -Wl,--dynamic-list,' \ + mkspecs/$(qt_mkspecs_dir)/qmake.conf || die + # use GCC over SunStudio + sed -i -e '/PLATFORM=solaris-cc/s/cc/g++/' configure || die + # don't flirt with non-Prefix stuff, we're quite possessive + sed -i -e '/^QMAKE_\(LIB\|INC\)DIR\(_X11\|_OPENGL\|\)\t/s/=.*$/=/' \ + mkspecs/$(qt_mkspecs_dir)/qmake.conf || die + # strip predefined CFLAGS from qmake ( bug #312689 ) + sed -i '/^QMAKE_CFLAGS_\(RELEASE\|DEBUG\)/s:+=.*:+=:' mkspecs/common/g++.conf + + base_src_prepare +} + +# @FUNCTION: qt4-build_src_configure +# @DESCRIPTION: +# Default configure phase +qt4-build_src_configure() { + setqtenv + myconf="$(standard_configure_options) ${myconf}" + + # this one is needed for all systems with a separate -liconv, apart from + # Darwin, for which the sources already cater for -liconv + use !elibc_glibc && [[ ${CHOST} != *-darwin* ]] && \ + myconf+=" -liconv" + + if has glib ${IUSE//+} && use glib; then + # use -I, -L and -l from configure + local glibflags="$(pkg-config --cflags --libs glib-2.0 gthread-2.0)" + # avoid the -pthread argument + myconf+=" ${glibflags//-pthread}" + unset glibflags + fi + + if use aqua ; then + # On (snow) leopard use the new (frameworked) cocoa code. 
+ if [[ ${CHOST##*-darwin} -ge 9 ]] ; then + myconf+=" -cocoa -framework" + + # We are crazy and build cocoa + qt3support :-) + if use qt3support; then + sed -e "/case \"\$PLATFORM,\$CFG_MAC_COCOA\" in/,/;;/ s|CFG_QT3SUPPORT=\"no\"|CFG_QT3SUPPORT=\"yes\"|" \ + -i configure + fi + + # We need the source's headers, not the installed ones. + myconf+=" -I${S}/include" + + # Add hint for the framework location. + myconf+=" -F${QTLIBDIR}" + fi + else + # freetype2 include dir is non-standard, thus include it on configure + # use -I from configure + myconf+=" $(pkg-config --cflags freetype2)" + fi + + echo ./configure ${myconf} + ./configure ${myconf} || die "./configure failed" + myconf="" +} + +# @FUNCTION: qt4-build_src_compile +# @DESCRIPTION: Actual compile phase +qt4-build_src_compile() { + setqtenv + + build_directories ${QT4_TARGET_DIRECTORIES} +} + +# @FUNCTION: qt4-build_src_test +# @DESCRIPTION: +# Runs tests only in target directories. +qt4-build_src_test() { + for dir in ${QT4_TARGET_DIRECTORIES}; do + emake -j1 check -C ${dir} + done +} + +# @FUNCTION: fix_includes +# @DESCRIPTION: +# For MacOSX we need to add some symlinks when frameworks are +# being used, to avoid complications with some more or less stupid packages. +fix_includes() { + if use aqua && [[ ${CHOST##*-darwin} -ge 9 ]] ; then + # Some packages tend to include + dodir "${QTHEADERDIR#${EPREFIX}}"/Qt + + # Fake normal headers when frameworks are installed... eases life later on + local dest f + for frw in "${D}${QTLIBDIR}"/*.framework; do + [[ -e "${frw}"/Headers ]] || continue + f=$(basename ${frw}) + dest="${QTHEADERDIR#${EPREFIX}}"/${f%.framework} + dosym "${QTLIBDIR#${EPREFIX}}"/${f}/Headers "${dest}" + + # Link normal headers as well. 
+		for hdr in "${D}/${QTLIBDIR}/${f}"/Headers/*; do
+			h=$(basename ${hdr})
+			dosym "${QTLIBDIR#${EPREFIX}}"/${f}/Headers/${h} "${QTHEADERDIR#${EPREFIX}}"/Qt/${h}
+		done
+		done
+	fi
+}
+
+# @FUNCTION: qt4-build_src_install
+# @DESCRIPTION:
+# Perform the actual installation including some library fixes.
+qt4-build_src_install() {
+	[[ ${EAPI} == 2 ]] && use !prefix && ED=${D}
+	setqtenv
+	install_directories ${QT4_TARGET_DIRECTORIES}
+	install_qconfigs
+	fix_library_files
+	fix_includes
+	# remove .la files since we are building only shared Qt libraries
+	find "${D}"${QTLIBDIR} -name "*.la" -print0 | xargs -0 rm
+	prep_ml_includes
+}
+
+# @FUNCTION: setqtenv
+setqtenv() {
+	# Set up installation directories
+	QTBASEDIR=${EPREFIX}/usr/$(get_libdir)/qt4
+	QTPREFIXDIR=${EPREFIX}/usr
+	QTBINDIR=${EPREFIX}/usr/bin
+	QTLIBDIR=${EPREFIX}/usr/$(get_libdir)/qt4
+	QMAKE_LIBDIR_QT=${QTLIBDIR}
+	QTPCDIR=${EPREFIX}/usr/$(get_libdir)/pkgconfig
+	QTDATADIR=${EPREFIX}/usr/share/qt4
+	QTDOCDIR=${EPREFIX}/usr/share/doc/qt-${PV}
+	QTHEADERDIR=${EPREFIX}/usr/include/qt4
+	QTPLUGINDIR=${QTLIBDIR}/plugins
+	QTSYSCONFDIR=${EPREFIX}/etc/qt4
+	QTTRANSDIR=${QTDATADIR}/translations
+	QTEXAMPLESDIR=${QTDATADIR}/examples
+	QTDEMOSDIR=${QTDATADIR}/demos
+	QT_INSTALL_PREFIX=${EPREFIX}/usr/$(get_libdir)/qt4
+	PLATFORM=$(qt_mkspecs_dir)
+
+	unset QMAKESPEC
+}
+
+# @FUNCTION: standard_configure_options
+# @DESCRIPTION:
+# Sets up some standard configure options, like libdir (if necessary), whether
+# debug info is wanted or not.
+standard_configure_options() {
+	local myconf=
+
+	[[ $(get_libdir) != lib ]] && myconf+=" -L${EPREFIX}/usr/$(get_libdir)"
+
+	# Disable visibility explicitly if gcc version isn't 4
+	if [[ $(gcc-major-version) -lt 4 ]]; then
+		myconf+=" -no-reduce-exports"
+	fi
+
+	# precompiled headers don't work on hardened, where the flag is masked.
+ myconf+=" $(qt_use pch)" + + if use debug; then + myconf+=" -debug" + else + myconf+=" -release" + fi + myconf+=" -no-separate-debug-info" + + use aqua && myconf+=" -no-framework" + + # ARCH is set on Gentoo. Qt now falls back to generic on an unsupported + # $(tc-arch). Therefore we convert it to supported values. + case "$(tc-arch)" in + amd64|x64-*) myconf+=" -arch x86_64" ;; + ppc-macos) myconf+=" -arch ppc" ;; + ppc|ppc64|ppc-*) myconf+=" -arch powerpc" ;; + sparc|sparc-*) myconf+=" -arch sparc" ;; + x86-macos) myconf+=" -arch x86" ;; + x86|x86-*) myconf+=" -arch i386" ;; + alpha|arm|ia64|mips|s390|sparc) myconf+=" -arch $(tc-arch)" ;; + hppa|sh) myconf+=" -arch generic" ;; + *) die "$(tc-arch) is unsupported by this eclass. Please file a bug." ;; + esac + + # 4.5: build everything but qt-xmlpatterns w/o exceptions + # 4.6: exceptions USE flag + local exceptions="-exceptions" + case "${PV}" in + 4.5.*) + [[ ${PN} == "qt-xmlpatterns" ]] || exceptions="-no-exceptions" + ;; + *) + has exceptions "${IUSE//+}" && exceptions="$(qt_use exceptions)" + ;; + esac + + # note about -reduce-relocations: + # That flag seems to introduce major breakage to applications, + # mostly to be seen as a core dump with the message "QPixmap: Must + # construct a QApplication before a QPaintDevice" on Solaris + # -- Daniel Vergien + [[ ${CHOST} != *-solaris* ]] && myconf+=" -reduce-relocations" + + myconf+=" -platform $(qt_mkspecs_dir) -stl -verbose -largefile -confirm-license + -prefix ${QTPREFIXDIR} -bindir ${QTBINDIR} -libdir ${QTLIBDIR} + -datadir ${QTDATADIR} -docdir ${QTDOCDIR} -headerdir ${QTHEADERDIR} + -plugindir ${QTPLUGINDIR} -sysconfdir ${QTSYSCONFDIR} + -translationdir ${QTTRANSDIR} -examplesdir ${QTEXAMPLESDIR} + -demosdir ${QTDEMOSDIR} -silent -fast -opensource + ${exceptions} + -nomake examples -nomake demos" + + echo "${myconf}" +} + +# @FUNCTION: build_directories +# @USAGE: < directories > +# @DESCRIPTION: +# Compiles the code in $QT4_TARGET_DIRECTORIES 
+build_directories() { + for x in "$@"; do + pushd "${S}"/${x} >/dev/null + # avoid running over the maximum argument number, bug #299810 + { + echo "${S}"/mkspecs/common/*.conf + find "${S}" -name '*.pr[io]' + } | xargs sed -i -e "s:\$\$\[QT_INSTALL_LIBS\]:${EPREFIX}/usr/$(get_libdir)/qt4:g" || die + "${S}"/bin/qmake "LIBS+=-L${QTLIBDIR}" "CONFIG+=nostrip" || die "qmake failed" + emake CC="$(tc-getCC)" \ + CXX="$(tc-getCXX)" \ + LINK="$(tc-getCXX)" || die "emake failed" + popd >/dev/null + done +} + +# @FUNCTION: install_directories +# @USAGE: < directories > +# @DESCRIPTION: +# run emake install in the given directories, which are separated by spaces +install_directories() { + for x in "$@"; do + pushd "${S}"/${x} >/dev/null || die "Can't pushd ${S}/${x}" + emake INSTALL_ROOT="${D}" install || die "emake install failed" + popd >/dev/null || die "Can't popd from ${S}/${x}" + done +} + +# @ECLASS-VARIABLE: QCONFIG_ADD +# @DESCRIPTION: +# List options that need to be added to QT_CONFIG in qconfig.pri +: ${QCONFIG_ADD:=} + +# @ECLASS-VARIABLE: QCONFIG_REMOVE +# @DESCRIPTION: +# List options that need to be removed from QT_CONFIG in qconfig.pri +: ${QCONFIG_REMOVE:=} + +# @ECLASS-VARIABLE: QCONFIG_DEFINE +# @DESCRIPTION: +# List variables that should be defined at the top of QtCore/qconfig.h +: ${QCONFIG_DEFINE:=} + +# @FUNCTION: install_qconfigs +# @DESCRIPTION: Install gentoo-specific mkspecs configurations +install_qconfigs() { + local x + if [[ -n ${QCONFIG_ADD} || -n ${QCONFIG_REMOVE} ]]; then + for x in QCONFIG_ADD QCONFIG_REMOVE; do + [[ -n ${!x} ]] && echo ${x}=${!x} >> "${T}"/${PN}-qconfig.pri + done + insinto ${QTDATADIR#${EPREFIX}}/mkspecs/gentoo + doins "${T}"/${PN}-qconfig.pri || die "installing ${PN}-qconfig.pri failed" + fi + + if [[ -n ${QCONFIG_DEFINE} ]]; then + for x in ${QCONFIG_DEFINE}; do + echo "#define ${x}" >> "${T}"/gentoo-${PN}-qconfig.h + done + insinto ${QTHEADERDIR#${EPREFIX}}/Gentoo + doins "${T}"/gentoo-${PN}-qconfig.h || die 
"installing ${PN}-qconfig.h failed" + fi +} + +# @FUNCTION: generate_qconfigs +# @DESCRIPTION: Generates gentoo-specific configurations +generate_qconfigs() { + if [[ -n ${QCONFIG_ADD} || -n ${QCONFIG_REMOVE} || -n ${QCONFIG_DEFINE} || ${CATEGORY}/${PN} == x11-libs/qt-core ]]; then + local x qconfig_add qconfig_remove qconfig_new + for x in "${ROOT}${QTDATADIR}"/mkspecs/gentoo/*-qconfig.pri; do + [[ -f ${x} ]] || continue + qconfig_add+=" $(sed -n 's/^QCONFIG_ADD=//p' "${x}")" + qconfig_remove+=" $(sed -n 's/^QCONFIG_REMOVE=//p' "${x}")" + done + + # these error checks do not use die because dying in pkg_post{inst,rm} + # just makes things worse. + if [[ -e "${ROOT}${QTDATADIR}"/mkspecs/gentoo/qconfig.pri ]]; then + # start with the qconfig.pri that qt-core installed + if ! cp "${ROOT}${QTDATADIR}"/mkspecs/gentoo/qconfig.pri \ + "${ROOT}${QTDATADIR}"/mkspecs/qconfig.pri; then + eerror "cp qconfig failed." + return 1 + fi + + # generate list of QT_CONFIG entries from the existing list + # including qconfig_add and excluding qconfig_remove + for x in $(sed -n 's/^QT_CONFIG +=//p' \ + "${ROOT}${QTDATADIR}"/mkspecs/qconfig.pri) ${qconfig_add}; do + hasq ${x} ${qconfig_remove} || qconfig_new+=" ${x}" + done + + # replace the existing QT_CONFIG list with qconfig_new + if ! sed -i -e "s/QT_CONFIG +=.*/QT_CONFIG += ${qconfig_new}/" \ + "${ROOT}${QTDATADIR}"/mkspecs/qconfig.pri; then + eerror "Sed for QT_CONFIG failed" + return 1 + fi + + # create Gentoo/qconfig.h + if [[ ! -e ${ROOT}${QTHEADERDIR}/Gentoo ]]; then + if ! 
mkdir -p "${ROOT}${QTHEADERDIR}"/Gentoo; then + eerror "mkdir ${QTHEADERDIR}/Gentoo failed" + return 1 + fi + fi + : > "${ROOT}${QTHEADERDIR}"/Gentoo/gentoo-qconfig.h + for x in "${ROOT}${QTHEADERDIR}"/Gentoo/gentoo-*-qconfig.h; do + [[ -f ${x} ]] || continue + cat "${x}" >> "${ROOT}${QTHEADERDIR}"/Gentoo/gentoo-qconfig.h + done + else + rm -f "${ROOT}${QTDATADIR}"/mkspecs/qconfig.pri + rm -f "${ROOT}${QTHEADERDIR}"/Gentoo/gentoo-qconfig.h + rmdir "${ROOT}${QTDATADIR}"/mkspecs \ + "${ROOT}${QTDATADIR}" \ + "${ROOT}${QTHEADERDIR}"/Gentoo \ + "${ROOT}${QTHEADERDIR}" 2>/dev/null + fi + fi +} + +# @FUNCTION: qt4-build_pkg_postrm +# @DESCRIPTION: Generate configurations when the package is completely removed +qt4-build_pkg_postrm() { + generate_qconfigs +} + +# @FUNCTION: qt4-build_pkg_postinst +# @DESCRIPTION: Generate configuration, plus throws a message about possible +# breakages and proposed solutions. +qt4-build_pkg_postinst() { + generate_qconfigs +} + +# @FUNCTION: skip_qmake_build_patch +# @DESCRIPTION: +# Don't need to build qmake, as it's already installed from qt-core +skip_qmake_build_patch() { + # Don't need to build qmake, as it's already installed from qt-core + sed -i -e "s:if true:if false:g" "${S}"/configure || die "Sed failed" +} + +# @FUNCTION: skip_project_generation_patch +# @DESCRIPTION: +# Exit the script early by throwing in an exit before all of the .pro files are scanned +skip_project_generation_patch() { + # Exit the script early by throwing in an exit before all of the .pro files are scanned + sed -e "s:echo \"Finding:exit 0\n\necho \"Finding:g" \ + -i "${S}"/configure || die "Sed failed" +} + +# @FUNCTION: symlink_binaries_to_buildtree +# @DESCRIPTION: +# Symlink generated binaries to buildtree so they can be used during compilation +# time +symlink_binaries_to_buildtree() { + for bin in qmake moc uic rcc; do + ln -s ${QTBINDIR}/${bin} "${S}"/bin/ || die "Symlinking ${bin} to ${S}/bin failed." 
+	done
+}
+
+# @FUNCTION: fix_library_files
+# @DESCRIPTION:
+# Fixes the paths in *.la, *.prl, *.pc, as they are wrong due to sandbox and
+# moves the *.pc-files into the pkgconfig directory
+fix_library_files() {
+	for libfile in "${D}"/${QTLIBDIR}/{*.la,*.prl,pkgconfig/*.pc}; do
+		if [[ -e ${libfile} ]]; then
+			sed -i -e "s:${S}/lib:${QTLIBDIR}:g" ${libfile} || die "Sed on ${libfile} failed."
+		fi
+	done
+
+	# pkgconfig files refer to WORKDIR/bin as the moc and uic locations. Fix:
+	for libfile in "${D}"/${QTLIBDIR}/pkgconfig/*.pc; do
+		if [[ -e ${libfile} ]]; then
+			sed -i -e "s:${S}/bin:${QTBINDIR}:g" ${libfile} || die "Sed failed"
+
+			# Move .pc files into the pkgconfig directory
+			dodir ${QTPCDIR#${EPREFIX}}
+			mv ${libfile} "${D}"/${QTPCDIR}/ \
+				|| die "Moving ${libfile} to ${D}/${QTPCDIR}/ failed."
+		fi
+	done
+
+	# Don't install an empty directory
+	rmdir "${D}"/${QTLIBDIR}/pkgconfig
+}
+
+# @FUNCTION: qt_use
+# @USAGE: < flag > [ feature ] [ enableval ]
+# @DESCRIPTION:
+# This will echo "${enableval}-${feature}" if <flag> is enabled, or
+# "-no-${feature}" if the flag is disabled. If [feature] is not specified,
+# <flag> will be used for that. If [enableval] is not specified, it omits the
+# assignment-part
+qt_use() {
+	local flag=$1
+	local feature=$1
+	local enableval=
+
+	[[ -n $2 ]] && feature=$2
+	[[ -n $3 ]] && enableval=-$3
+
+	if use ${flag}; then
+		echo "${enableval}-${feature}"
+	else
+		echo "-no-${feature}"
+	fi
+}
+
+# @FUNCTION: qt_mkspecs_dir
+# @RETURN: the specs-directory w/o path
+# @DESCRIPTION:
+# Allows us to define which mkspecs dir we want to use.
+qt_mkspecs_dir() {
+	# Allows us to define which mkspecs dir we want to use.
+ local spec + + case ${CHOST} in + *-freebsd*|*-dragonfly*) + spec=freebsd ;; + *-openbsd*) + spec=openbsd ;; + *-netbsd*) + spec=netbsd ;; + *-darwin*) + if use aqua; then + # mac with carbon/cocoa + spec=macx + else + # darwin/mac with x11 + spec=darwin + fi + ;; + *-solaris*) + spec=solaris ;; + *-linux-*|*-linux) + spec=linux ;; + *) + die "Unknown CHOST, no platform choosen." + esac + + CXX=$(tc-getCXX) + if [[ ${CXX} == *g++* ]]; then + spec+=-g++ + elif [[ ${CXX} == *icpc* ]]; then + spec+=-icc + else + die "Unknown compiler ${CXX}." + fi + if [[ -n ${LIBDIR/lib} ]]; then + spec+=-${LIBDIR/lib} + fi + + # Add -64 for 64bit profiles + if use x64-freebsd || + use amd64-linux || + use x64-macos || + use x64-solaris || + use sparc64-solaris + then + spec+=-64 + fi + + echo "${spec}" +} + +# @FUNCTION: qt_assistant_cleanup +# @RETURN: nothing +# @DESCRIPTION: +# Tries to clean up tools.pro for qt-assistant ebuilds +# Meant to be called in src_prepare +qt_assistant_cleanup() { + # different versions (and branches...) 
may need different handling, + # add a case if you need special handling + case "${MY_PV_EXTRA}" in + *kde-qt*) + sed -e "/^[ \t]*porting/,/^[ \t]*win32.*activeqt$/d" \ + -e "/mac/,/^embedded.*makeqpf$/d" \ + -i tools/tools.pro || die "patching tools.pro failed" + ;; + *) + sed -e "/^[ \t]*porting/,/^[ \t]*win32.*activeqt$/d" \ + -e "/mac/,/^embedded.*makeqpf$/d" \ + -e "s/^\([ \t]*pixeltool\) /\1 qdoc3 /" \ + -i tools/tools.pro || die "patching tools.pro failed" + ;; + esac +} + +# @FUNCTION: qt_nolibx11 +# @RETURN: nothing +# @DESCRIPTION: +# Ignore X11 tests for packages that don't need X libraries installed +qt_nolibx11() { + einfo "removing X11 check to allow X-less compilation" + sed -i "/unixtests\/compile.test.*config.tests\/x11\/xlib/,/fi$/d" "${S}"/configure || + die "x11 check sed failed" +} + +EXPORT_FUNCTIONS pkg_setup src_unpack src_prepare src_configure src_compile src_install src_test pkg_postrm pkg_postinst diff --git a/eclass/toolchain.eclass b/eclass/toolchain.eclass new file mode 100644 index 0000000..0845b7e --- /dev/null +++ b/eclass/toolchain.eclass @@ -0,0 +1,2494 @@ +# Copyright 1999-2008 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 +# $Header: /var/cvsroot/gentoo-x86/eclass/toolchain.eclass,v 1.436 2010/07/22 01:26:33 dirtyepic Exp $ +# +# Maintainer: Toolchain Ninjas + +HOMEPAGE="http://gcc.gnu.org/" +LICENSE="GPL-2 LGPL-2.1" +RESTRICT="strip" # cross-compilers need controlled stripping + +#---->> eclass stuff <<---- +inherit eutils versionator libtool toolchain-funcs flag-o-matic gnuconfig multilib fixheadtails + +EXPORT_FUNCTIONS pkg_setup src_unpack src_compile src_test pkg_preinst src_install pkg_postinst pkg_prerm pkg_postrm +DESCRIPTION="Based on the ${ECLASS} eclass" + +FEATURES=${FEATURES/multilib-strict/} + +toolchain_pkg_setup() { + gcc_pkg_setup +} +toolchain_src_unpack() { + gcc_src_unpack +} +toolchain_src_compile() { + gcc_src_compile +} +toolchain_src_test() { + gcc_src_test +} 
+toolchain_pkg_preinst() { + ${ETYPE}_pkg_preinst +} +toolchain_src_install() { + ${ETYPE}_src_install +} +toolchain_pkg_postinst() { + ${ETYPE}_pkg_postinst +} +toolchain_pkg_prerm() { + ${ETYPE}_pkg_prerm +} +toolchain_pkg_postrm() { + ${ETYPE}_pkg_postrm +} +#----<< eclass stuff >>---- + + +#---->> globals <<---- +export CTARGET=${CTARGET:-${CHOST}} +if [[ ${CTARGET} = ${CHOST} ]] ; then + if [[ ${CATEGORY/cross-} != ${CATEGORY} ]] ; then + export CTARGET=${CATEGORY/cross-} + fi +fi +is_crosscompile() { + [[ ${CHOST} != ${CTARGET} ]] +} + +tc_version_is_at_least() { version_is_at_least "$1" "${2:-${GCC_PV}}" ; } + + +GCC_PV=${TOOLCHAIN_GCC_PV:-${PV}} +GCC_PVR=${GCC_PV} +[[ ${PR} != "r0" ]] && GCC_PVR=${GCC_PVR}-${PR} +GCC_RELEASE_VER=$(get_version_component_range 1-3 ${GCC_PV}) +GCC_BRANCH_VER=$(get_version_component_range 1-2 ${GCC_PV}) +GCCMAJOR=$(get_version_component_range 1 ${GCC_PV}) +GCCMINOR=$(get_version_component_range 2 ${GCC_PV}) +GCCMICRO=$(get_version_component_range 3 ${GCC_PV}) +[[ ${BRANCH_UPDATE-notset} == "notset" ]] && BRANCH_UPDATE=$(get_version_component_range 4 ${GCC_PV}) + +# According to gcc/c-cppbuiltin.c, GCC_CONFIG_VER MUST match this regex. +# ([^0-9]*-)?[0-9]+[.][0-9]+([.][0-9]+)?([- ].*)? 
+GCC_CONFIG_VER=${GCC_CONFIG_VER:-$(replace_version_separator 3 '-' ${GCC_PV})}
+
+# Pre-release support
+if [[ ${GCC_PV} != ${GCC_PV/_pre/-} ]] ; then
+	PRERELEASE=${GCC_PV/_pre/-}
+fi
+# make _alpha and _beta ebuilds automatically use a snapshot
+if [[ ${GCC_PV} == *_alpha* ]] ; then
+	SNAPSHOT=${GCC_BRANCH_VER}-${GCC_PV##*_alpha}
+elif [[ ${GCC_PV} == *_beta* ]] ; then
+	SNAPSHOT=${GCC_BRANCH_VER}-${GCC_PV##*_beta}
+elif [[ ${GCC_PV} == *_rc* ]] ; then
+	SNAPSHOT=${GCC_PV%_rc*}-RC-${GCC_PV##*_rc}
+fi
+export GCC_FILESDIR=${GCC_FILESDIR:-${FILESDIR}}
+
+if [[ ${ETYPE} == "gcc-library" ]] ; then
+	GCC_VAR_TYPE=${GCC_VAR_TYPE:-non-versioned}
+	GCC_LIB_COMPAT_ONLY=${GCC_LIB_COMPAT_ONLY:-true}
+	GCC_TARGET_NO_MULTILIB=${GCC_TARGET_NO_MULTILIB:-true}
+else
+	GCC_VAR_TYPE=${GCC_VAR_TYPE:-versioned}
+	GCC_LIB_COMPAT_ONLY="false"
+	GCC_TARGET_NO_MULTILIB=${GCC_TARGET_NO_MULTILIB:-false}
+fi
+
+PREFIX=${TOOLCHAIN_PREFIX:-/usr}
+
+if [[ ${GCC_VAR_TYPE} == "versioned" ]] ; then
+	if tc_version_is_at_least 3.4.0 ; then
+		LIBPATH=${TOOLCHAIN_LIBPATH:-${PREFIX}/lib/gcc/${CTARGET}/${GCC_CONFIG_VER}}
+	else
+		LIBPATH=${TOOLCHAIN_LIBPATH:-${PREFIX}/lib/gcc-lib/${CTARGET}/${GCC_CONFIG_VER}}
+	fi
+	INCLUDEPATH=${TOOLCHAIN_INCLUDEPATH:-${LIBPATH}/include}
+	if is_crosscompile ; then
+		BINPATH=${TOOLCHAIN_BINPATH:-${PREFIX}/${CHOST}/${CTARGET}/gcc-bin/${GCC_CONFIG_VER}}
+	else
+		BINPATH=${TOOLCHAIN_BINPATH:-${PREFIX}/${CTARGET}/gcc-bin/${GCC_CONFIG_VER}}
+	fi
+	DATAPATH=${TOOLCHAIN_DATAPATH:-${PREFIX}/share/gcc-data/${CTARGET}/${GCC_CONFIG_VER}}
+	# Don't install in /usr/include/g++-v3/, but in gcc internal directory.
+	# We will handle /usr/include/g++-v3/ with gcc-config ...
+	STDCXX_INCDIR=${TOOLCHAIN_STDCXX_INCDIR:-${LIBPATH}/include/g++-v${GCC_BRANCH_VER/\.*/}}
+elif [[ ${GCC_VAR_TYPE} == "non-versioned" ]] ; then
+	# using non-versioned directories to install gcc, like what is currently
+	# done for ppc64 and 3.3.3_pre, is a BAD IDEA. DO NOT do it!! However...
+ # setting up variables for non-versioned directories might be useful for + # specific gcc targets, like libffi. Note that we dont override the value + # returned by get_libdir here. + LIBPATH=${TOOLCHAIN_LIBPATH:-${PREFIX}/$(get_libdir)} + INCLUDEPATH=${TOOLCHAIN_INCLUDEPATH:-${PREFIX}/include} + BINPATH=${TOOLCHAIN_BINPATH:-${PREFIX}/bin} + DATAPATH=${TOOLCHAIN_DATAPATH:-${PREFIX}/share} + STDCXX_INCDIR=${TOOLCHAIN_STDCXX_INCDIR:-${PREFIX}/include/g++-v3} +fi + +#----<< globals >>---- + + +#---->> SLOT+IUSE logic <<---- +if [[ ${ETYPE} == "gcc-library" ]] ; then + IUSE="nls build test" + SLOT="${CTARGET}-${SO_VERSION_SLOT:-5}" +else + IUSE="multislot nptl test" + + if [[ ${PN} != "kgcc64" && ${PN} != gcc-* ]] ; then + IUSE="${IUSE} altivec build fortran nls nocxx" + [[ -n ${PIE_VER} ]] && IUSE="${IUSE} nopie" + [[ -n ${PP_VER} ]] && IUSE="${IUSE} nossp" + [[ -n ${SPECS_VER} ]] && IUSE="${IUSE} nossp" + [[ -n ${HTB_VER} ]] && IUSE="${IUSE} boundschecking" + [[ -n ${D_VER} ]] && IUSE="${IUSE} d" + + if tc_version_is_at_least 3 ; then + IUSE="${IUSE} bootstrap doc gcj gtk hardened libffi multilib objc vanilla" + + # gcc-{nios2,bfin} don't accept these + if [[ ${PN} == "gcc" ]] ; then + IUSE="${IUSE} n32 n64" + fi + + tc_version_is_at_least "4.0" && IUSE="${IUSE} objc-gc mudflap" + tc_version_is_at_least "4.1" && IUSE="${IUSE} objc++" + tc_version_is_at_least "4.2" && IUSE="${IUSE} openmp" + tc_version_is_at_least "4.2" && IUSE="${IUSE} nsplugin" + tc_version_is_at_least "4.3" && IUSE="${IUSE} fixed-point" + if tc_version_is_at_least "4.4" ; then + IUSE="${IUSE} graphite" + [[ -n ${SPECS_VER} ]] && IUSE="${IUSE} nossp" + fi + tc_version_is_at_least "4.5" && IUSE="${IUSE} lto" + fi + fi + + # Support upgrade paths here or people get pissed + if use multislot ; then + SLOT="${CTARGET}-${GCC_CONFIG_VER}" + elif is_crosscompile; then + SLOT="${CTARGET}-${GCC_BRANCH_VER}" + else + SLOT="${GCC_BRANCH_VER}" + fi +fi +#----<< SLOT+IUSE logic >>---- + + +#---->> S + SRC_URI 
essentials <<---- + +# This function sets the source directory depending on whether we're using +# a prerelease, snapshot, or release tarball. To use it, just set S with: +# +# S="$(gcc_get_s_dir)" +# +# Travis Tilley (03 Sep 2004) +# +gcc_get_s_dir() { + local GCC_S + if [[ -n ${PRERELEASE} ]] ; then + GCC_S=${WORKDIR}/gcc-${PRERELEASE} + elif [[ -n ${SNAPSHOT} ]] ; then + GCC_S=${WORKDIR}/gcc-${SNAPSHOT} + else + GCC_S=${WORKDIR}/gcc-${GCC_RELEASE_VER} + fi + echo "${GCC_S}" +} + +# This function handles the basics of setting the SRC_URI for a gcc ebuild. +# To use, set SRC_URI with: +# +# SRC_URI="$(get_gcc_src_uri)" +# +# Other than the variables normally set by portage, this function's behavior +# can be altered by setting the following: +# +# SNAPSHOT +# If set, this variable signals that we should be using a snapshot +# of gcc from ftp://sources.redhat.com/pub/gcc/snapshots/. It is +# expected to be in the format "YYYY-MM-DD". Note that if the ebuild +# has a _pre suffix, this variable is ignored and the prerelease +# tarball is used instead. +# +# BRANCH_UPDATE +# If set, this variable signals that we should be using the main +# release tarball (determined by ebuild version) and applying a +# CVS branch update patch against it. The location of this branch +# update patch is assumed to be in ${GENTOO_TOOLCHAIN_BASE_URI}. +# Just like with SNAPSHOT, this variable is ignored if the ebuild +# has a _pre suffix. +# +# PATCH_VER +# PATCH_GCC_VER +# This should be set to the version of the gentoo patch tarball. +# The resulting filename of this tarball will be: +# gcc-${PATCH_GCC_VER:-${GCC_RELEASE_VER}}-patches-${PATCH_VER}.tar.bz2 +# +# PIE_VER +# PIE_GCC_VER +# obsoleted: PIE_CORE +# These variables control patching in various updates for the logic +# controlling Position Independant Executables. PIE_VER is expected +# to be the version of this patch, PIE_GCC_VER the gcc version of +# the patch, and PIE_CORE (obsoleted) the actual filename of the patch. 
+# An example: +# PIE_VER="8.7.6.5" +# PIE_GCC_VER="3.4.0" +# The resulting filename of this tarball will be: +# gcc-${PIE_GCC_VER:-${GCC_RELEASE_VER}}-piepatches-v${PIE_VER}.tar.bz2 +# old syntax (do not define PIE_CORE anymore): +# PIE_CORE="gcc-3.4.0-piepatches-v${PIE_VER}.tar.bz2" +# +# SPECS_VER +# SPECS_GCC_VER +# This is for the minispecs files included in the hardened gcc-4.x +# The specs files for hardenedno*, vanilla and for building the "specs" file. +# SPECS_VER is expected to be the version of this patch, SPECS_GCC_VER +# the gcc version of the patch. +# An example: +# SPECS_VER="8.7.6.5" +# SPECS_GCC_VER="3.4.0" +# The resulting filename of this tarball will be: +# gcc-${SPECS_GCC_VER:-${GCC_RELEASE_VER}}-specs-${SPECS_VER}.tar.bz2 +# +# PP_VER +# PP_GCC_VER +# obsoleted: PP_FVER +# These variables control patching in stack smashing protection +# support. They both control the version of ProPolice to download. +# +# PP_VER / PP_GCC_VER +# Used to roll our own custom tarballs of ssp. +# PP_FVER / PP_VER +# Used for mirroring ssp straight from IBM. +# PP_VER sets the version of the directory in which to find the +# patch, and PP_FVER sets the version of the patch itself. For +# example: +# PP_VER="3_4" +# PP_FVER="${PP_VER//_/.}-2" +# would download gcc3_4/protector-3.4-2.tar.gz +# +# HTB_VER +# HTB_GCC_VER +# These variables control whether or not an ebuild supports Herman +# ten Brugge's bounds-checking patches. If you want to use a patch +# for an older gcc version with a new gcc, make sure you set +# HTB_GCC_VER to that version of gcc. +# +# MAN_VER +# The version of gcc for which we will download manpages. This will +# default to ${GCC_RELEASE_VER}, but we may not want to pre-generate man pages +# for prerelease test ebuilds for example. This allows you to +# continue using pre-generated manpages from the last stable release. +# If set to "none", this will prevent the downloading of manpages, +# which is useful for individual library targets. 
+# +gentoo_urls() { + local devspace="HTTP~lv/GCC/URI HTTP~eradicator/gcc/URI HTTP~vapier/dist/URI + HTTP~halcy0n/patches/URI HTTP~zorry/patches/gcc/URI HTTP~dirtyepic/dist/URI" + devspace=${devspace//HTTP/http:\/\/dev.gentoo.org\/} + echo mirror://gentoo/$1 ${devspace//URI/$1} +} +get_gcc_src_uri() { + export PATCH_GCC_VER=${PATCH_GCC_VER:-${GCC_RELEASE_VER}} + export UCLIBC_GCC_VER=${UCLIBC_GCC_VER:-${PATCH_GCC_VER}} + export PIE_GCC_VER=${PIE_GCC_VER:-${GCC_RELEASE_VER}} + export PP_GCC_VER=${PP_GCC_VER:-${GCC_RELEASE_VER}} + export HTB_GCC_VER=${HTB_GCC_VER:-${GCC_RELEASE_VER}} + export SPECS_GCC_VER=${SPECS_GCC_VER:-${GCC_RELEASE_VER}} + + # Set where to download gcc itself depending on whether we're using a + # prerelease, snapshot, or release tarball. + if [[ -n ${PRERELEASE} ]] ; then + GCC_SRC_URI="ftp://gcc.gnu.org/pub/gcc/prerelease-${PRERELEASE}/gcc-${PRERELEASE}.tar.bz2" + elif [[ -n ${SNAPSHOT} ]] ; then + GCC_SRC_URI="ftp://sources.redhat.com/pub/gcc/snapshots/${SNAPSHOT}/gcc-${SNAPSHOT}.tar.bz2" + else + GCC_SRC_URI="mirror://gnu/gcc/gcc-${GCC_PV}/gcc-${GCC_RELEASE_VER}.tar.bz2" + # we want all branch updates to be against the main release + [[ -n ${BRANCH_UPDATE} ]] && \ + GCC_SRC_URI="${GCC_SRC_URI} $(gentoo_urls gcc-${GCC_RELEASE_VER}-branch-update-${BRANCH_UPDATE}.patch.bz2)" + fi + + # propolice aka stack smashing protection + if [[ -n ${PP_VER} ]] ; then + if [[ -n ${PP_FVER} ]] ; then + GCC_SRC_URI="${GCC_SRC_URI} + !nossp? ( + http://www.research.ibm.com/trl/projects/security/ssp/gcc${PP_VER}/protector-${PP_FVER}.tar.gz + $(gentoo_urls protector-${PP_FVER}.tar.gz) + )" + else + GCC_SRC_URI="${GCC_SRC_URI} $(gentoo_urls gcc-${PP_GCC_VER}-ssp-${PP_VER}.tar.bz2)" + fi + fi + + # uclibc lovin + [[ -n ${UCLIBC_VER} ]] && \ + GCC_SRC_URI="${GCC_SRC_URI} $(gentoo_urls gcc-${UCLIBC_GCC_VER}-uclibc-patches-${UCLIBC_VER}.tar.bz2)" + + # PERL cannot be present at bootstrap, and is used to build the man pages. + # So... 
lets include some pre-generated ones, shall we? + [[ -n ${MAN_VER} ]] && \ + GCC_SRC_URI="${GCC_SRC_URI} $(gentoo_urls gcc-${MAN_VER}-manpages.tar.bz2)" + + # various gentoo patches + [[ -n ${PATCH_VER} ]] && \ + GCC_SRC_URI="${GCC_SRC_URI} $(gentoo_urls gcc-${PATCH_GCC_VER}-patches-${PATCH_VER}.tar.bz2)" + + # strawberry pie, Cappuccino and a Gauloises (it's a good thing) + [[ -n ${PIE_VER} ]] && \ + PIE_CORE=${PIE_CORE:-gcc-${PIE_GCC_VER}-piepatches-v${PIE_VER}.tar.bz2} && \ + GCC_SRC_URI="${GCC_SRC_URI} $(gentoo_urls ${PIE_CORE})" + + # gcc minispec for the hardened gcc 4 compiler + [[ -n ${SPECS_VER} ]] && \ + GCC_SRC_URI="${GCC_SRC_URI} $(gentoo_urls gcc-${SPECS_GCC_VER}-specs-${SPECS_VER}.tar.bz2)" + + # gcc bounds checking patch + if [[ -n ${HTB_VER} ]] ; then + local HTBFILE="bounds-checking-gcc-${HTB_GCC_VER}-${HTB_VER}.patch.bz2" + GCC_SRC_URI="${GCC_SRC_URI} + boundschecking? ( + mirror://sourceforge/boundschecking/${HTBFILE} + $(gentoo_urls ${HTBFILE}) + )" + fi + + # support for the D language + [[ -n ${D_VER} ]] && \ + GCC_SRC_URI="${GCC_SRC_URI} d? ( mirror://sourceforge/dgcc/gdc-${D_VER}-src.tar.bz2 )" + + # >= gcc-4.3 uses ecj.jar and we only add gcj as a use flag under certain + # conditions + if [[ ${PN} != "kgcc64" && ${PN} != gcc-* ]] ; then + tc_version_is_at_least "4.3" && \ + GCC_SRC_URI="${GCC_SRC_URI} + gcj? 
( ftp://sourceware.org/pub/java/ecj-4.3.jar )" + fi + + echo "${GCC_SRC_URI}" +} +S=$(gcc_get_s_dir) +SRC_URI=$(get_gcc_src_uri) +#---->> S + SRC_URI essentials >>---- + + +#---->> support checks <<---- + +# Grab a variable from the build system (taken from linux-info.eclass) +get_make_var() { + local var=$1 makefile=${2:-${WORKDIR}/build/Makefile} + echo -e "e:\\n\\t@echo \$(${var})\\ninclude ${makefile}" | \ + r=${makefile%/*} emake --no-print-directory -s -f - 2>/dev/null +} +XGCC() { get_make_var GCC_FOR_TARGET ; } + +# The gentoo piessp patches allow for 3 configurations: +# 1) PIE+SSP by default +# 2) PIE by default +# 3) SSP by default +hardened_gcc_works() { + if [[ $1 == "pie" ]] ; then + # $gcc_cv_ld_pie is unreliable as it simply take the output of + # `ld --help | grep -- -pie`, that reports the option in all cases, also if + # the loader doesn't actually load the resulting executables. + # To avoid breakage, blacklist FreeBSD here at least + [[ ${CTARGET} == *-freebsd* ]] && return 1 + + want_pie || return 1 + tc_version_is_at_least 4.3.2 && use nopie && return 1 + hardened_gcc_is_stable pie && return 0 + if has "~$(tc-arch)" ${ACCEPT_KEYWORDS} ; then + hardened_gcc_check_unsupported pie && return 1 + ewarn "Allowing pie-by-default for an unstable arch ($(tc-arch))" + return 0 + fi + return 1 + elif [[ $1 == "ssp" ]] ; then + [[ -n ${PP_VER} ]] || [[ -n ${SPECS_VER} ]] || return 1 + tc_version_is_at_least 4.3.2 && use nossp && return 1 + hardened_gcc_is_stable ssp && return 0 + if has "~$(tc-arch)" ${ACCEPT_KEYWORDS} ; then + hardened_gcc_check_unsupported ssp && return 1 + ewarn "Allowing ssp-by-default for an unstable arch ($(tc-arch))" + return 0 + fi + return 1 + else + # laziness ;) + hardened_gcc_works pie || return 1 + hardened_gcc_works ssp || return 1 + return 0 + fi +} + +hardened_gcc_is_stable() { + if [[ $1 == "pie" ]] ; then + # HARDENED_* variables are deprecated and here for compatibility + local tocheck="${HARDENED_PIE_WORKS} 
${HARDENED_GCC_WORKS}" + if [[ ${CTARGET} == *-uclibc* ]] ; then + tocheck="${tocheck} ${PIE_UCLIBC_STABLE}" + else + tocheck="${tocheck} ${PIE_GLIBC_STABLE}" + fi + elif [[ $1 == "ssp" ]] ; then + # ditto + local tocheck="${HARDENED_SSP_WORKS} ${HARDENED_GCC_WORKS}" + if [[ ${CTARGET} == *-uclibc* ]] ; then + tocheck="${tocheck} ${SSP_UCLIBC_STABLE}" + else + tocheck="${tocheck} ${SSP_STABLE}" + fi + else + die "hardened_gcc_stable needs to be called with pie or ssp" + fi + + hasq $(tc-arch) ${tocheck} && return 0 + return 1 +} + +hardened_gcc_check_unsupported() { + local tocheck="" + # if a variable is unset, we assume that all archs are unsupported. since + # this function is never called if hardened_gcc_is_stable returns true, + # this shouldn't cause problems... however, allowing this logic to work + # even with the variables unset will break older ebuilds that dont use them. + if [[ $1 == "pie" ]] ; then + if [[ ${CTARGET} == *-uclibc* ]] ; then + [[ -z ${PIE_UCLIBC_UNSUPPORTED} ]] && return 0 + tocheck="${tocheck} ${PIE_UCLIBC_UNSUPPORTED}" + else + [[ -z ${PIE_GLIBC_UNSUPPORTED} ]] && return 0 + tocheck="${tocheck} ${PIE_GLIBC_UNSUPPORTED}" + fi + elif [[ $1 == "ssp" ]] ; then + if [[ ${CTARGET} == *-uclibc* ]] ; then + [[ -z ${SSP_UCLIBC_UNSUPPORTED} ]] && return 0 + tocheck="${tocheck} ${SSP_UCLIBC_UNSUPPORTED}" + else + [[ -z ${SSP_UNSUPPORTED} ]] && return 0 + tocheck="${tocheck} ${SSP_UNSUPPORTED}" + fi + else + die "hardened_gcc_check_unsupported needs to be called with pie or ssp" + fi + + hasq $(tc-arch) ${tocheck} && return 0 + return 1 +} + +has_libssp() { + [[ -e /$(get_libdir)/libssp.so ]] && return 0 + return 1 +} + +want_libssp() { + [[ ${GCC_LIBSSP_SUPPORT} == "true" ]] || return 1 + has_libssp || return 1 + [[ -n ${PP_VER} ]] || return 1 + return 0 +} + +_want_stuff() { + local var=$1 flag=$2 + [[ -z ${!var} ]] && return 1 + use ${flag} && return 0 + return 1 +} +want_boundschecking() { _want_stuff HTB_VER boundschecking ; } +want_pie() { + 
! use hardened && [[ -n ${PIE_VER} ]] && use nopie && return 1 + [[ -n ${PIE_VER} ]] && [[ -n ${SPECS_VER} ]] && return 0 + tc_version_is_at_least 4.3.2 && return 1 + _want_stuff PIE_VER !nopie +} +want_ssp() { _want_stuff PP_VER !nossp ; } + +# SPLIT_SPECS are deprecated for >=GCC 4.4 +want_split_specs() { + tc_version_is_at_least 4.4 && return 1 + [[ ${SPLIT_SPECS} == "true" ]] && want_pie +} +want_minispecs() { + if tc_version_is_at_least 4.3.2 && use hardened ; then + if ! want_pie ; then + ewarn "PIE_VER or SPECS_VER is not defiend in the GCC ebuild." + elif use vanilla ; then + ewarn "You will not get hardened features if you have the vanilla USE-flag." + elif use nopie && use nossp ; then + ewarn "You will not get hardened features if you have the nopie and nossp USE-flag." + elif ! hardened_gcc_works pie && ! hardened_gcc_works ssp && ! use nopie ; then + ewarn "Your $(tc-arch) arch is not supported." + else + return 0 + fi + ewarn "Hope you know what you are doing. Hardened will not work." + return 0 + fi + return 1 +} +# This function checks whether or not glibc has the support required to build +# Position Independent Executables with gcc. +glibc_have_pie() { + if [[ ! -f ${ROOT}/usr/$(get_libdir)/Scrt1.o ]] ; then + echo + ewarn "Your glibc does not have support for pie, the file Scrt1.o is missing" + ewarn "Please update your glibc to a proper version or disable hardened" + echo + return 1 + fi +} + +# This function determines whether or not libc has been patched with stack +# smashing protection support. +libc_has_ssp() { + [[ ${ROOT} != "/" ]] && return 0 + + # lib hacks taken from sandbox configure + echo 'int main(){}' > "${T}"/libctest.c + LC_ALL=C gcc "${T}"/libctest.c -lc -o libctest -Wl,-verbose &> "${T}"/libctest.log || return 1 + local libc_file=$(awk '/attempt to open/ { if (($4 ~ /\/libc\.so/) && ($5 == "succeeded")) LIBC = $4; }; END {print LIBC}' "${T}"/libctest.log) + + [[ -z ${libc_file} ]] && die "Unable to find a libc !?" 
+ + # Check for gcc-4.x style ssp support + if [[ -n $(readelf -s "${libc_file}" 2>/dev/null | \ + grep 'FUNC.*GLOBAL.*__stack_chk_fail') ]] + then + return 0 + else + # Check for gcc-3.x style ssp support + if [[ -n $(readelf -s "${libc_file}" 2>/dev/null | \ + grep 'OBJECT.*GLOBAL.*__guard') ]] && \ + [[ -n $(readelf -s "${libc_file}" 2>/dev/null | \ + grep 'FUNC.*GLOBAL.*__stack_smash_handler') ]] + then + return 0 + elif is_crosscompile ; then + die "'${libc_file}' was detected w/out ssp, that sucks (a lot)" + else + return 1 + fi + fi +} + +# This is to make sure we don't accidentally try to enable support for a +# language that doesnt exist. GCC 3.4 supports f77, while 4.0 supports f95, etc. +# +# Also add a hook so special ebuilds (kgcc64) can control which languages +# exactly get enabled +gcc-lang-supported() { + grep ^language=\"${1}\" "${S}"/gcc/*/config-lang.in > /dev/null || return 1 + [[ -z ${TOOLCHAIN_ALLOWED_LANGS} ]] && return 0 + has $1 ${TOOLCHAIN_ALLOWED_LANGS} +} + +#----<< support checks >>---- + +#---->> specs + env.d logic <<---- + +# defaults to enable for all hardened toolchains +gcc_common_hard="-DEFAULT_RELRO -DEFAULT_BIND_NOW" + +# configure to build with the hardened GCC specs as the default +make_gcc_hard() { + if hardened_gcc_works ; then + einfo "Updating gcc to use automatic PIE + SSP building ..." + sed -e "s|^HARD_CFLAGS = |HARD_CFLAGS = -DEFAULT_PIE_SSP ${gcc_common_hard} |" \ + -i "${S}"/gcc/Makefile.in || die "Failed to update gcc!" + elif hardened_gcc_works pie ; then + einfo "Updating gcc to use automatic PIE building ..." + ewarn "SSP has not been enabled by default" + sed -e "s|^HARD_CFLAGS = |HARD_CFLAGS = -DEFAULT_PIE ${gcc_common_hard} |" \ + -i "${S}"/gcc/Makefile.in || die "Failed to update gcc!" + elif hardened_gcc_works ssp ; then + einfo "Updating gcc to use automatic SSP building ..." 
+ ewarn "PIE has not been enabled by default" + sed -e "s|^HARD_CFLAGS = |HARD_CFLAGS = -DEFAULT_SSP ${gcc_common_hard} |" \ + -i "${S}"/gcc/Makefile.in || die "Failed to update gcc!" + else + # do nothing if hardened isnt supported, but dont die either + ewarn "hardened is not supported for this arch in this gcc version" + ebeep + return 0 + fi + + # rebrand to make bug reports easier + BRANDING_GCC_PKGVERSION=${BRANDING_GCC_PKGVERSION/Gentoo/Gentoo Hardened} +} + +# now we generate different spec files so that the user can select a compiler +# that enforces certain features in gcc itself and so we don't have to worry +# about a certain package ignoring CFLAGS/LDFLAGS +_create_specs_file() { + # Usage: _create_specs_file + local uflag=$1 name=$2 flags=${*:3} + ebegin "Creating a ${name} gcc specs file" + pushd "${WORKDIR}"/build/gcc > /dev/null + if [[ -z ${uflag} ]] || use ${uflag} ; then + # backup the compiler first + cp Makefile Makefile.orig + sed -i -e '/^HARD_CFLAGS/s:=.*:='"${flags}"':' Makefile + mv xgcc xgcc.foo + mv gcc.o gcc.o.foo + emake -s xgcc + $(XGCC) -dumpspecs > "${WORKDIR}"/build/${name}.specs + # restore everything to normal + mv gcc.o.foo gcc.o + mv xgcc.foo xgcc + mv Makefile.orig Makefile + else + $(XGCC) -dumpspecs > "${WORKDIR}"/build/${name}.specs + fi + popd > /dev/null + eend $([[ -s ${WORKDIR}/build/${name}.specs ]] ; echo $?) 
+} +create_vanilla_specs_file() { _create_specs_file hardened vanilla ; } +create_hardened_specs_file() { _create_specs_file !hardened hardened ${gcc_common_hard} -DEFAULT_PIE_SSP ; } +create_hardenednossp_specs_file() { _create_specs_file "" hardenednossp ${gcc_common_hard} -DEFAULT_PIE ; } +create_hardenednopie_specs_file() { _create_specs_file "" hardenednopie ${gcc_common_hard} -DEFAULT_SSP ; } +create_hardenednopiessp_specs_file() { _create_specs_file "" hardenednopiessp ${gcc_common_hard} ; } + +split_out_specs_files() { + local s spec_list="hardenednopiessp vanilla" + if hardened_gcc_works ; then + spec_list="${spec_list} hardened hardenednossp hardenednopie" + elif hardened_gcc_works pie ; then + spec_list="${spec_list} hardenednossp" + elif hardened_gcc_works ssp ; then + spec_list="${spec_list} hardenednopie" + fi + for s in ${spec_list} ; do + create_${s}_specs_file || return 1 + done +} + +create_gcc_env_entry() { + dodir /etc/env.d/gcc + local gcc_envd_base="/etc/env.d/gcc/${CTARGET}-${GCC_CONFIG_VER}" + + if [[ -z $1 ]] ; then + gcc_envd_file="${D}${gcc_envd_base}" + # I'm leaving the following commented out to remind me that it + # was an insanely -bad- idea. Stuff broke. GCC_SPECS isnt unset + # on chroot or in non-toolchain.eclass gcc ebuilds! + #gcc_specs_file="${LIBPATH}/specs" + gcc_specs_file="" + else + gcc_envd_file="${D}${gcc_envd_base}-$1" + gcc_specs_file="${LIBPATH}/$1.specs" + fi + + # phase PATH/ROOTPATH out ... + echo "PATH=\"${BINPATH}\"" > ${gcc_envd_file} + echo "ROOTPATH=\"${BINPATH}\"" >> ${gcc_envd_file} + echo "GCC_PATH=\"${BINPATH}\"" >> ${gcc_envd_file} + + if use multilib && ! has_multilib_profile; then + LDPATH="${LIBPATH}" + for path in 32 64 ; do + [[ -d ${LIBPATH}/${path} ]] && LDPATH="${LDPATH}:${LIBPATH}/${path}" + done + else + local MULTIDIR + LDPATH="${LIBPATH}" + + # We want to list the default ABI's LIBPATH first so libtool + # searches that directory first. 
This is a temporary + # workaround for libtool being stupid and using .la's from + # conflicting ABIs by using the first one in the search path + + local abi=${DEFAULT_ABI} + local MULTIDIR=$($(XGCC) $(get_abi_CFLAGS ${abi}) --print-multi-directory) + if [[ ${MULTIDIR} == "." ]] ; then + LDPATH=${LIBPATH} + else + LDPATH=${LIBPATH}/${MULTIDIR} + fi + + for abi in $(get_all_abis) ; do + [[ ${abi} == ${DEFAULT_ABI} ]] && continue + + MULTIDIR=$($(XGCC) $(get_abi_CFLAGS ${abi}) --print-multi-directory) + if [[ ${MULTIDIR} == "." ]] ; then + LDPATH=${LDPATH}:${LIBPATH} + else + LDPATH=${LDPATH}:${LIBPATH}/${MULTIDIR} + fi + done + fi + + echo "LDPATH=\"${LDPATH}\"" >> ${gcc_envd_file} + echo "MANPATH=\"${DATAPATH}/man\"" >> ${gcc_envd_file} + echo "INFOPATH=\"${DATAPATH}/info\"" >> ${gcc_envd_file} + echo "STDCXX_INCDIR=\"${STDCXX_INCDIR##*/}\"" >> ${gcc_envd_file} + + is_crosscompile && echo "CTARGET=${CTARGET}" >> ${gcc_envd_file} + + # Set which specs file to use + [[ -n ${gcc_specs_file} ]] && echo "GCC_SPECS=\"${gcc_specs_file}\"" >> ${gcc_envd_file} +} +setup_minispecs_gcc_build_specs() { + # Setup the "build.specs" file for gcc 4.3 to use when building. + if hardened_gcc_works pie ; then + cat "${WORKDIR}"/specs/pie.specs >> "${WORKDIR}"/build.specs + fi + if hardened_gcc_works ssp ; then + for s in ssp sspall ; do + cat "${WORKDIR}"/specs/${s}.specs >> "${WORKDIR}"/build.specs + done + fi + for s in nostrict znow ; do + cat "${WORKDIR}"/specs/${s}.specs >> "${WORKDIR}"/build.specs + done + export GCC_SPECS="${WORKDIR}"/build.specs +} +copy_minispecs_gcc_specs() { + # setup the hardenedno* specs files and the vanilla specs file. 
+ if hardened_gcc_works ; then + create_gcc_env_entry hardenednopiessp + fi + if hardened_gcc_works pie ; then + create_gcc_env_entry hardenednopie + fi + if hardened_gcc_works ssp ; then + create_gcc_env_entry hardenednossp + fi + create_gcc_env_entry vanilla + insinto ${LIBPATH} + doins "${WORKDIR}"/specs/*.specs || die "failed to install specs" + # Build system specs file which, if it exists, must be a complete set of + # specs as it completely and unconditionally overrides the builtin specs. + # For gcc 4.3 + if ! tc_version_is_at_least 4.4 ; then + $(XGCC) -dumpspecs > "${WORKDIR}"/specs/specs + cat "${WORKDIR}"/build.specs >> "${WORKDIR}"/specs/specs + doins "${WORKDIR}"/specs/specs || die "failed to install the specs file" + fi +} + +#----<< specs + env.d logic >>---- + +#---->> pkg_* <<---- +gcc_pkg_setup() { + [[ -z ${ETYPE} ]] && die "Your ebuild needs to set the ETYPE variable" + + if [[ ( $(tc-arch) == "amd64" || $(tc-arch) == "ppc64" ) && ( ${LD_PRELOAD} == "/lib/libsandbox.so" || ${LD_PRELOAD} == "/usr/lib/libsandbox.so" ) ]] && is_multilib ; then + eerror "Sandbox in your installed portage does not support compilation." + eerror "of a multilib gcc. Please set FEATURES=-sandbox and try again." + eerror "After you have a multilib gcc, re-emerge portage to have a working sandbox." + die "No 32bit sandbox. Retry with FEATURES=-sandbox." + fi + + if [[ ${ETYPE} == "gcc-compiler" ]] ; then + case $(tc-arch) in + mips) + # Must compile for mips64-linux target if we want n32/n64 support + case "${CTARGET}" in + mips64*) ;; + *) + if use n32 || use n64; then + eerror "n32/n64 can only be used when target host is mips64*-*-linux-*"; + die "Invalid USE flags for CTARGET ($CTARGET)"; + fi + ;; + esac + + #cannot have both n32 & n64 without multilib + if use n32 && use n64 && ! 
is_multilib; then + eerror "Please enable multilib if you want to use both n32 & n64"; + die "Invalid USE flag combination"; + fi + ;; + esac + + # Setup variables which would normally be in the profile + if is_crosscompile ; then + multilib_env ${CTARGET} + if ! use multilib ; then + MULTILIB_ABIS=${DEFAULT_ABI} + fi + fi + + # we dont want to use the installed compiler's specs to build gcc! + unset GCC_SPECS + fi + + want_libssp && libc_has_ssp && \ + die "libssp cannot be used with a glibc that has been patched to provide ssp symbols" + want_minispecs + + unset LANGUAGES #265283 +} + +gcc-compiler_pkg_preinst() { + : +} + +gcc-compiler_pkg_postinst() { + do_gcc_config + + if ! is_crosscompile ; then + echo + ewarn "If you have issues with packages unable to locate libstdc++.la," + ewarn "then try running 'fix_libtool_files.sh' on the old gcc versions." + echo + fi + + # If our gcc-config version doesn't like '-' in it's version string, + # tell our users that gcc-config will yell at them, but it's all good. + if ! has_version '>=sys-devel/gcc-config-1.3.10-r1' && [[ ${GCC_CONFIG_VER/-/} != ${GCC_CONFIG_VER} ]] ; then + ewarn "Your version of gcc-config will issue about having an invalid profile" + ewarn "when switching to this profile. It is safe to ignore this warning," + ewarn "and this problem has been corrected in >=sys-devel/gcc-config-1.3.10-r1." + fi + + if ! is_crosscompile && ! use multislot && [[ ${GCCMAJOR}.${GCCMINOR} == 3.4 ]] ; then + echo + ewarn "You should make sure to rebuild all your C++ packages when" + ewarn "upgrading between different versions of gcc. For example," + ewarn "when moving to gcc-3.4 from gcc-3.3, emerge gentoolkit and run:" + ewarn " # revdep-rebuild --library libstdc++.so.5" + echo + ewarn "For more information on the steps to take when upgrading " + ewarn "from gcc-3.3 please refer to: " + ewarn "http://www.gentoo.org/doc/en/gcc-upgrading.xml" + echo + fi + + if ! 
is_crosscompile ; then + # hack to prevent collisions between SLOT + [[ ! -d ${ROOT}/lib/rcscripts/awk ]] \ + && mkdir -p "${ROOT}"/lib/rcscripts/awk + [[ ! -d ${ROOT}/sbin ]] \ + && mkdir -p "${ROOT}"/sbin + cp "${ROOT}/${DATAPATH}"/fixlafiles.awk "${ROOT}"/lib/rcscripts/awk/ || die "installing fixlafiles.awk" + cp "${ROOT}/${DATAPATH}"/fix_libtool_files.sh "${ROOT}"/sbin/ || die "installing fix_libtool_files.sh" + + [[ ! -d ${ROOT}/usr/bin ]] \ + && mkdir -p "${ROOT}"/usr/bin + # Since these aren't critical files and portage sucks with + # handling of binpkgs, don't require these to be found + for x in "${ROOT}/${DATAPATH}"/c{89,99} ; do + if [[ -e ${x} ]]; then + cp ${x} "${ROOT}"/usr/bin/ || die "installing c89/c99" + fi + done + fi +} + +gcc-compiler_pkg_prerm() { + # Don't let these files be uninstalled #87647 + touch -c "${ROOT}"/sbin/fix_libtool_files.sh \ + "${ROOT}"/lib/rcscripts/awk/fixlafiles.awk +} + +gcc-compiler_pkg_postrm() { + # to make our lives easier (and saner), we do the fix_libtool stuff here. + # rather than checking SLOT's and trying in upgrade paths, we just see if + # the common libstdc++.la exists in the ${LIBPATH} of the gcc that we are + # unmerging. if it does, that means this was a simple re-emerge. + + # clean up the cruft left behind by cross-compilers + if is_crosscompile ; then + if [[ -z $(ls "${ROOT}"/etc/env.d/gcc/${CTARGET}* 2>/dev/null) ]] ; then + rm -f "${ROOT}"/etc/env.d/gcc/config-${CTARGET} + rm -f "${ROOT}"/etc/env.d/??gcc-${CTARGET} + rm -f "${ROOT}"/usr/bin/${CTARGET}-{gcc,{g,c}++}{,32,64} + fi + return 0 + fi + + # ROOT isnt handled by the script + [[ ${ROOT} != "/" ]] && return 0 + + if [[ ! 
-e ${LIBPATH}/libstdc++.so ]] ; then + # make sure the profile is sane during same-slot upgrade #289403 + do_gcc_config + + einfo "Running 'fix_libtool_files.sh ${GCC_RELEASE_VER}'" + /sbin/fix_libtool_files.sh ${GCC_RELEASE_VER} + if [[ -n ${BRANCH_UPDATE} ]] ; then + einfo "Running 'fix_libtool_files.sh ${GCC_RELEASE_VER}-${BRANCH_UPDATE}'" + /sbin/fix_libtool_files.sh ${GCC_RELEASE_VER}-${BRANCH_UPDATE} + fi + fi + + return 0 +} + +#---->> pkg_* <<---- + +#---->> src_* <<---- + +# generic GCC src_unpack, to be called from the ebuild's src_unpack. +# BIG NOTE regarding hardened support: ebuilds with support for hardened are +# expected to export the following variable: +# +# HARDENED_GCC_WORKS +# This variable should be set to the archs on which hardened should +# be allowed. For example: HARDENED_GCC_WORKS="x86 sparc amd64" +# This allows for additional archs to be supported by hardened when +# ready. +# +# Travis Tilley (03 Sep 2004) +# +gcc-compiler_src_unpack() { + # fail if using pie patches, building hardened, and glibc doesnt have + # the necessary support + want_pie && use hardened && glibc_have_pie + + if use hardened ; then + einfo "updating configuration to build hardened GCC" + make_gcc_hard || die "failed to make gcc hard" + fi + + if is_libffi ; then + # move the libffi target out of gcj and into all + sed -i \ + -e '/^libgcj=/s:target-libffi::' \ + -e '/^target_lib/s:=":="target-libffi :' \ + "${S}"/configure || die + fi +} +gcc-library_src_unpack() { + : +} +guess_patch_type_in_dir() { + [[ -n $(ls "$1"/*.bz2 2>/dev/null) ]] \ + && EPATCH_SUFFIX="patch.bz2" \ + || EPATCH_SUFFIX="patch" +} +do_gcc_rename_java_bins() { + # bug #139918 - conflict between gcc and java-config-2 for ownership of + # /usr/bin/rmi{c,registry}. 
Done with mv & sed rather than a patch + # because patches would be large (thanks to the rename of man files), + # and it's clear from the sed invocations that all that changes is the + # rmi{c,registry} names to grmi{c,registry} names. + # Kevin F. Quinn 2006-07-12 + einfo "Renaming jdk executables rmic and rmiregistry to grmic and grmiregistry." + # 1) Move the man files if present (missing prior to gcc-3.4) + for manfile in rmic rmiregistry; do + [[ -f ${S}/gcc/doc/${manfile}.1 ]] || continue + mv "${S}"/gcc/doc/${manfile}.1 "${S}"/gcc/doc/g${manfile}.1 + done + # 2) Fixup references in the docs if present (missing prior to gcc-3.4) + for jfile in gcc/doc/gcj.info gcc/doc/grmic.1 gcc/doc/grmiregistry.1 gcc/java/gcj.texi; do + [[ -f ${S}/${jfile} ]] || continue + sed -i -e 's:rmiregistry:grmiregistry:g' "${S}"/${jfile} || + die "Failed to fixup file ${jfile} for rename to grmiregistry" + sed -i -e 's:rmic:grmic:g' "${S}"/${jfile} || + die "Failed to fixup file ${jfile} for rename to grmic" + done + # 3) Fixup Makefiles to build the changed executable names + # These are present in all 3.x versions, and are the important bit + # to get gcc to build with the new names. + for jfile in libjava/Makefile.am libjava/Makefile.in gcc/java/Make-lang.in; do + sed -i -e 's:rmiregistry:grmiregistry:g' "${S}"/${jfile} || + die "Failed to fixup file ${jfile} for rename to grmiregistry" + # Careful with rmic on these files; it's also the name of a directory + # which should be left unchanged. Replace occurrences of 'rmic$', + # 'rmic_' and 'rmic '. + sed -i -e 's:rmic\([$_ ]\):grmic\1:g' "${S}"/${jfile} || + die "Failed to fixup file ${jfile} for rename to grmic" + done +} +gcc_src_unpack() { + export BRANDING_GCC_PKGVERSION="Gentoo ${GCC_PVR}" + + [[ -z ${UCLIBC_VER} ]] && [[ ${CTARGET} == *-uclibc* ]] && die "Sorry, this version does not support uClibc" + + gcc_quick_unpack + exclude_gcc_patches + + cd "${S}" + + if ! 
use vanilla ; then + if [[ -n ${PATCH_VER} ]] ; then + guess_patch_type_in_dir "${WORKDIR}"/patch + EPATCH_MULTI_MSG="Applying Gentoo patches ..." \ + epatch "${WORKDIR}"/patch + BRANDING_GCC_PKGVERSION="${BRANDING_GCC_PKGVERSION} p${PATCH_VER}" + fi + if [[ -n ${UCLIBC_VER} ]] ; then + guess_patch_type_in_dir "${WORKDIR}"/uclibc + EPATCH_MULTI_MSG="Applying uClibc patches ..." \ + epatch "${WORKDIR}"/uclibc + fi + fi + do_gcc_HTB_patches + do_gcc_SSP_patches + do_gcc_PIE_patches + epatch_user + + ${ETYPE}_src_unpack || die "failed to ${ETYPE}_src_unpack" + + # protoize don't build on FreeBSD, skip it + ## removed in 4.5, bug #270558 --de. + if [[ ${GCCMAJOR}.${GCCMINOR} < 4.5 ]]; then + if ! is_crosscompile && ! use elibc_FreeBSD ; then + # enable protoize / unprotoize + sed -i -e '/^LANGUAGES =/s:$: proto:' "${S}"/gcc/Makefile.in + fi + fi + + fix_files="" + for x in contrib/test_summary libstdc++-v3/scripts/check_survey.in ; do + [[ -e ${x} ]] && fix_files="${fix_files} ${x}" + done + ht_fix_file ${fix_files} */configure *.sh */Makefile.in + + if ! is_crosscompile && is_multilib && \ + [[ ( $(tc-arch) == "amd64" || $(tc-arch) == "ppc64" ) && -z ${SKIP_MULTILIB_HACK} ]] ; then + disgusting_gcc_multilib_HACK || die "multilib hack failed" + fi + + gcc_version_patch + if [[ ${GCCMAJOR}.${GCCMINOR} > 4.0 ]] ; then + if [[ -n ${SNAPSHOT} || -n ${PRERELEASE} ]] ; then + echo ${PV/_/-} > "${S}"/gcc/BASE-VER + fi + fi + + # >= gcc-4.3 doesn't bundle ecj.jar, so copy it + if [[ ${GCCMAJOR}.${GCCMINOR} > 4.2 ]] && + use gcj ; then + cp -pPR "${DISTDIR}/ecj-4.3.jar" "${S}/ecj.jar" || die + fi + + # disable --as-needed from being compiled into gcc specs + # natively when using a gcc version < 3.4.4 + # http://gcc.gnu.org/bugzilla/show_bug.cgi?id=14992 + if ! tc_version_is_at_least 3.4.4 ; then + sed -i -e s/HAVE_LD_AS_NEEDED/USE_LD_AS_NEEDED/g "${S}"/gcc/config.in + fi + + # In gcc 3.3.x and 3.4.x, rename the java bins to gcc-specific names + # in line with gcc-4. 
+ if [[ ${GCCMAJOR} == 3 ]] && + [[ ${GCCMINOR} -ge 3 ]] + then + do_gcc_rename_java_bins + fi + + # Fixup libtool to correctly generate .la files with portage + cd "${S}" + elibtoolize --portage --shallow --no-uclibc + + gnuconfig_update + + # update configure files + local f + einfo "Fixing misc issues in configure files" + tc_version_is_at_least 4.1 && epatch "${GCC_FILESDIR}"/gcc-configure-texinfo.patch + for f in $(grep -l 'autoconf version 2.13' $(find "${S}" -name configure)) ; do + ebegin " Updating ${f/${S}\/} [LANG]" + patch "${f}" "${GCC_FILESDIR}"/gcc-configure-LANG.patch >& "${T}"/configure-patch.log \ + || eerror "Please file a bug about this" + eend $? + done + sed -i 's|A-Za-z0-9|[:alnum:]|g' "${S}"/gcc/*.awk #215828 + + if [[ -x contrib/gcc_update ]] ; then + einfo "Touching generated files" + ./contrib/gcc_update --touch | \ + while read f ; do + einfo " ${f%%...}" + done + fi + +# disable_multilib_libjava || die "failed to disable multilib java" +} + +gcc-library-configure() { + # multilib support + [[ ${GCC_TARGET_NO_MULTILIB} == "true" ]] \ + && confgcc="${confgcc} --disable-multilib" \ + || confgcc="${confgcc} --enable-multilib" +} + +gcc-compiler-configure() { + # multilib support + if is_multilib ; then + confgcc="${confgcc} --enable-multilib" + elif [[ ${CTARGET} == *-linux* ]] ; then + confgcc="${confgcc} --disable-multilib" + fi + + if tc_version_is_at_least "4.0" ; then + if has mudflap ${IUSE} ; then + confgcc="${confgcc} $(use_enable mudflap libmudflap)" + else + confgcc="${confgcc} --disable-libmudflap" + fi + + if want_libssp ; then + confgcc="${confgcc} --enable-libssp" + else + export gcc_cv_libc_provides_ssp=yes + confgcc="${confgcc} --disable-libssp" + fi + + # If we want hardened support with the newer piepatchset for >=gcc 4.4 + if tc_version_is_at_least 4.4 && want_minispecs ; then + confgcc="${confgcc} $(use_enable hardened esp)" + fi + + if tc_version_is_at_least "4.2" ; then + confgcc="${confgcc} $(use_enable openmp 
libgomp)" + fi + + # enable the cld workaround until we move things to stable. + # by that point, the rest of the software out there should + # have caught up. + if tc_version_is_at_least "4.3" ; then + if ! has ${ARCH} ${KEYWORDS} ; then + confgcc="${confgcc} --enable-cld" + fi + fi + + # Stick the python scripts in their own slotted directory + # bug #279252 + # + # --with-python-dir=DIR + # Specifies where to install the Python modules used for aot-compile. DIR + # should not include the prefix used in installation. For example, if the + # Python modules are to be installed in /usr/lib/python2.5/site-packages, + # then –with-python-dir=/lib/python2.5/site-packages should be passed. + # + # This should translate into "/share/gcc-data/${CTARGET}/${GCC_CONFIG_VER}/python" + if tc_version_is_at_least "4.4" ; then + confgcc="${confgcc} --with-python-dir=${DATAPATH/$PREFIX/}/python" + fi + fi + + # For newer versions of gcc, use the default ("release"), because no + # one (even upstream apparently) tests with it disabled. #317217 + if tc_version_is_at_least 4 || [[ -n ${GCC_CHECKS_LIST} ]] ; then + confgcc="${confgcc} --enable-checking=${GCC_CHECKS_LIST:-release}" + else + confgcc="${confgcc} --disable-checking" + fi + + # GTK+ is preferred over xlib in 3.4.x (xlib is unmaintained + # right now). Much thanks to for the heads up. + # Travis Tilley (11 Jul 2004) + if ! is_gcj ; then + confgcc="${confgcc} --disable-libgcj" + elif use gtk ; then + confgcc="${confgcc} --enable-java-awt=gtk \ + $(use_enable nsplugin plugin)" + elif use qt ; then + confgcc="${confgcc} --enable-java-awt=qt" + fi + + if is_crosscompile && is_gcj ; then + confgcc="${confgcc} --disable-jni" + fi + + case $(tc-arch) in + arm) #264534 + local arm_arch="${CTARGET%%-*}" + # Only do this if arm_arch is armv* + if [[ ${arm_arch} == armv* ]] ; then + # Convert armv7{a,r,m} to armv7-{a,r,m} + [[ ${arm_arch} == armv7? 
]] && arm_arch=${arm_arch/7/7-} + # Remove endian ('l' / 'eb') + [[ ${arm_arch} == *l ]] && arm_arch=${arm_arch%l} + [[ ${arm_arch} == *eb ]] && arm_arch=${arm_arch%eb} + confgcc="${confgcc} --with-arch=${arm_arch}" + fi + + # Enable hardvfp + if [[ ${CTARGET##*-} == *eabi ]] && [[ $(tc-is-hardfloat) == yes ]] && \ + tc_version_is_at_least "4.5" ; then + confgcc="${confgcc} --with-float=hard" + fi + ;; + # Add --with-abi flags to set default MIPS ABI + mips) + local mips_abi="" + use n64 && mips_abi="--with-abi=64" + use n32 && mips_abi="--with-abi=n32" + [[ -n ${mips_abi} ]] && confgcc="${confgcc} ${mips_abi}" + ;; + # Default arch for x86 is normally i386, lets give it a bump + # since glibc will do so based on CTARGET anyways + x86) + confgcc="${confgcc} --with-arch=${CTARGET%%-*}" + ;; + # Enable sjlj exceptions for backward compatibility on hppa + hppa) + [[ ${GCCMAJOR} == "3" ]] && confgcc="${confgcc} --enable-sjlj-exceptions" + ;; + esac + + GCC_LANG="c" + is_cxx && GCC_LANG="${GCC_LANG},c++" + is_d && GCC_LANG="${GCC_LANG},d" + is_gcj && GCC_LANG="${GCC_LANG},java" + if is_objc || is_objcxx ; then + GCC_LANG="${GCC_LANG},objc" + if tc_version_is_at_least "4.0" ; then + use objc-gc && confgcc="${confgcc} --enable-objc-gc" + fi + is_objcxx && GCC_LANG="${GCC_LANG},obj-c++" + fi + is_treelang && GCC_LANG="${GCC_LANG},treelang" + + # fortran support just got sillier! the lang value can be f77 for + # fortran77, f95 for fortran95, or just plain old fortran for the + # currently supported standard depending on gcc version. + is_fortran && GCC_LANG="${GCC_LANG},fortran" + is_f77 && GCC_LANG="${GCC_LANG},f77" + is_f95 && GCC_LANG="${GCC_LANG},f95" + + # We do NOT want 'ADA support' in here! 
+ # is_ada && GCC_LANG="${GCC_LANG},ada" + + einfo "configuring for GCC_LANG: ${GCC_LANG}" +} + +# Other than the variables described for gcc_setup_variables, the following +# will alter the behavior of gcc_do_configure: +# +# CTARGET +# CBUILD +# Enable building for a target that differs from CHOST +# +# GCC_TARGET_NO_MULTILIB +# Disable multilib. Useful when building single library targets. +# +# GCC_LANG +# Enable support for ${GCC_LANG} languages. defaults to just "c" +# +# Travis Tilley (04 Sep 2004) +# +gcc_do_configure() { + local confgcc + + # Set configuration based on path variables + confgcc="${confgcc} \ + --prefix=${PREFIX} \ + --bindir=${BINPATH} \ + --includedir=${INCLUDEPATH} \ + --datadir=${DATAPATH} \ + --mandir=${DATAPATH}/man \ + --infodir=${DATAPATH}/info \ + --with-gxx-include-dir=${STDCXX_INCDIR}" + # On Darwin we need libdir to be set in order to get correct install names + # for things like libobjc-gnu, libgcj and libfortran. If we enable it on + # non-Darwin we screw up the behaviour this eclass relies on. We in + # particular need this over --libdir for bug #255315. + [[ ${CHOST} == *-darwin* ]] && \ + confgcc="${confgcc} --enable-version-specific-runtime-libs" + + # All our cross-compile logic goes here ! woo ! + confgcc="${confgcc} --host=${CHOST}" + if is_crosscompile || tc-is-cross-compiler ; then + # Straight from the GCC install doc: + # "GCC has code to correctly determine the correct value for target + # for nearly all native systems. Therefore, we highly recommend you + # not provide a configure target when configuring a native compiler." + confgcc="${confgcc} --target=${CTARGET}" + fi + [[ -n ${CBUILD} ]] && confgcc="${confgcc} --build=${CBUILD}" + + # ppc altivec support + confgcc="${confgcc} $(use_enable altivec)" + + # gcc has fixed-point arithmetic support in 4.3 for mips targets that can + # significantly increase compile time by several hours. 
This will allow + # users to control this feature in the event they need the support. + tc_version_is_at_least "4.3" && confgcc="${confgcc} $(use_enable fixed-point)" + + # graphite support was added in 4.4, which depends upon external libraries + # for optimizations. This option allows users to determine if they want + # these optimizations and libraries pulled in + tc_version_is_at_least "4.4" && \ + confgcc="${confgcc} $(use_with graphite ppl) $(use_with graphite cloog)" + + # lto support was added in 4.5, which depends upon elfutils. This allows + # users to enable that option, and pull in the additional library + tc_version_is_at_least "4.5" && \ + confgcc="${confgcc} $(use_enable lto)" + + + [[ $(tc-is-softfloat) == "yes" ]] && confgcc="${confgcc} --with-float=soft" + [[ $(tc-is-hardfloat) == "yes" ]] && confgcc="${confgcc} --with-float=hard" + + # Native Language Support + if use nls ; then + confgcc="${confgcc} --enable-nls --without-included-gettext" + else + confgcc="${confgcc} --disable-nls" + fi + + # reasonably sane globals (hopefully) + confgcc="${confgcc} \ + --with-system-zlib \ + --disable-werror \ + --enable-secureplt" + + # etype specific configuration + einfo "running ${ETYPE}-configure" + ${ETYPE}-configure || die + + # if not specified, assume we are building for a target that only + # requires C support + GCC_LANG=${GCC_LANG:-c} + confgcc="${confgcc} --enable-languages=${GCC_LANG}" + + if is_crosscompile ; then + # When building a stage1 cross-compiler (just C compiler), we have to + # disable a bunch of features or gcc goes boom + local needed_libc="" + case ${CTARGET} in + *-linux) needed_libc=no-fucking-clue;; + *-dietlibc) needed_libc=dietlibc;; + *-elf) needed_libc=newlib;; + *-freebsd*) needed_libc=freebsd-lib;; + *-gnu*) needed_libc=glibc;; + *-klibc) needed_libc=klibc;; + *-uclibc*) needed_libc=uclibc;; + *-cygwin) needed_libc=cygwin;; + mingw*|*-mingw*) needed_libc=mingw-runtime;; + avr) confgcc="${confgcc} --enable-shared 
--disable-threads";; + esac + if [[ -n ${needed_libc} ]] ; then + if ! has_version ${CATEGORY}/${needed_libc} ; then + confgcc="${confgcc} --disable-shared --disable-threads --without-headers" + elif built_with_use --hidden --missing false ${CATEGORY}/${needed_libc} crosscompile_opts_headers-only ; then + confgcc="${confgcc} --disable-shared --with-sysroot=${PREFIX}/${CTARGET}" + else + confgcc="${confgcc} --with-sysroot=${PREFIX}/${CTARGET}" + fi + fi + + if [[ ${GCCMAJOR}.${GCCMINOR} > 4.1 ]] ; then + confgcc="${confgcc} --disable-bootstrap" + fi + else + if tc-is-static-only ; then + confgcc="${confgcc} --disable-shared" + else + confgcc="${confgcc} --enable-shared" + fi + case ${CHOST} in + mingw*|*-mingw*|*-cygwin) + confgcc="${confgcc} --enable-threads=win32" ;; + *-mint*) + confgcc="${confgcc} --disable-threads" ;; + *) + confgcc="${confgcc} --enable-threads=posix" ;; + esac + fi + [[ ${CTARGET} == *-elf ]] && confgcc="${confgcc} --with-newlib" + # __cxa_atexit is "essential for fully standards-compliant handling of + # destructors", but apparently requires glibc. + if [[ ${CTARGET} == *-uclibc* ]] ; then + confgcc="${confgcc} --disable-__cxa_atexit --enable-target-optspace $(use_enable nptl tls)" + [[ ${GCCMAJOR}.${GCCMINOR} == 3.3 ]] && confgcc="${confgcc} --enable-sjlj-exceptions" + if tc_version_is_at_least 3.4 && [[ ${GCCMAJOR}.${GCCMINOR} < 4.3 ]] ; then + confgcc="${confgcc} --enable-clocale=uclibc" + fi + elif [[ ${CTARGET} == *-gnu* ]] ; then + confgcc="${confgcc} --enable-__cxa_atexit" + confgcc="${confgcc} --enable-clocale=gnu" + elif [[ ${CTARGET} == *-freebsd* ]]; then + confgcc="${confgcc} --enable-__cxa_atexit" + elif [[ ${CTARGET} == *-solaris* ]]; then + confgcc="${confgcc} --enable-__cxa_atexit" + fi + [[ ${GCCMAJOR}.${GCCMINOR} < 3.4 ]] && confgcc="${confgcc} --disable-libunwind-exceptions" + + # create a sparc*linux*-{gcc,g++} that can handle -m32 and -m64 (biarch) + if [[ ${CTARGET} == sparc*linux* ]] \ + && is_multilib \ + && ! 
is_crosscompile \ + && [[ ${GCCMAJOR}.${GCCMINOR} > 4.2 ]] + then + confgcc="${confgcc} --enable-targets=all" + fi + + tc_version_is_at_least 4.3 && set -- "$@" \ + --with-bugurl=http://bugs.gentoo.org/ \ + --with-pkgversion="${BRANDING_GCC_PKGVERSION}" + set -- ${confgcc} "$@" ${EXTRA_ECONF} + + # Nothing wrong with a good dose of verbosity + echo + einfo "PREFIX: ${PREFIX}" + einfo "BINPATH: ${BINPATH}" + einfo "LIBPATH: ${LIBPATH}" + einfo "DATAPATH: ${DATAPATH}" + einfo "STDCXX_INCDIR: ${STDCXX_INCDIR}" + echo + einfo "Configuring GCC with: ${@//--/\n\t--}" + echo + + # Build in a separate build tree + mkdir -p "${WORKDIR}"/build + pushd "${WORKDIR}"/build > /dev/null + + # and now to do the actual configuration + addwrite /dev/zero + echo "${S}"/configure "$@" + "${S}"/configure "$@" || die "failed to run configure" + + # return to whatever directory we were in before + popd > /dev/null +} + +# This function accepts one optional argument, the make target to be used. +# If ommitted, gcc_do_make will try to guess whether it should use all, +# profiledbootstrap, or bootstrap-lean depending on CTARGET and arch. An +# example of how to use this function: +# +# gcc_do_make all-target-libstdc++-v3 +# +# In addition to the target to be used, the following variables alter the +# behavior of this function: +# +# LDFLAGS +# Flags to pass to ld +# +# STAGE1_CFLAGS +# CFLAGS to use during stage1 of a gcc bootstrap +# +# BOOT_CFLAGS +# CFLAGS to use during stages 2+3 of a gcc bootstrap. 
+# +# Travis Tilley (04 Sep 2004) +# +gcc_do_make() { + # Fix for libtool-portage.patch + local OLDS=${S} + S=${WORKDIR}/build + + # Set make target to $1 if passed + [[ -n $1 ]] && GCC_MAKE_TARGET=$1 + # default target + if is_crosscompile || tc-is-cross-compiler ; then + # 3 stage bootstrapping doesnt quite work when you cant run the + # resulting binaries natively ^^; + GCC_MAKE_TARGET=${GCC_MAKE_TARGET-all} + else + GCC_MAKE_TARGET=${GCC_MAKE_TARGET-bootstrap-lean} + fi + + # the gcc docs state that parallel make isnt supported for the + # profiledbootstrap target, as collisions in profile collecting may occur. + [[ ${GCC_MAKE_TARGET} == "profiledbootstrap" ]] && export MAKEOPTS="${MAKEOPTS} -j1" + + # boundschecking seems to introduce parallel build issues + want_boundschecking && export MAKEOPTS="${MAKEOPTS} -j1" + + if [[ ${GCC_MAKE_TARGET} == "all" ]] ; then + STAGE1_CFLAGS=${STAGE1_CFLAGS-"${CFLAGS}"} + elif [[ $(gcc-version) == "3.4" && ${GCC_BRANCH_VER} == "3.4" ]] && gcc-specs-ssp ; then + # See bug #79852 + STAGE1_CFLAGS=${STAGE1_CFLAGS-"-O2"} + else + STAGE1_CFLAGS=${STAGE1_CFLAGS-"-O"} + fi + + if is_crosscompile; then + # In 3.4, BOOT_CFLAGS is never used on a crosscompile... + # but I'll leave this in anyways as someone might have had + # some reason for putting it in here... --eradicator + BOOT_CFLAGS=${BOOT_CFLAGS-"-O2"} + else + # we only want to use the system's CFLAGS if not building a + # cross-compiler. + BOOT_CFLAGS=${BOOT_CFLAGS-"$(get_abi_CFLAGS) ${CFLAGS}"} + fi + + pushd "${WORKDIR}"/build + + emake \ + LDFLAGS="${LDFLAGS}" \ + STAGE1_CFLAGS="${STAGE1_CFLAGS}" \ + LIBPATH="${LIBPATH}" \ + BOOT_CFLAGS="${BOOT_CFLAGS}" \ + ${GCC_MAKE_TARGET} \ + || die "emake failed with ${GCC_MAKE_TARGET}" + + if ! is_crosscompile && ! 
use nocxx && use doc ; then + if type -p doxygen > /dev/null ; then + if tc_version_is_at_least 4.3 ; then + cd "${CTARGET}"/libstdc++-v3/doc + emake doc-man-doxygen || ewarn "failed to make docs" + elif tc_version_is_at_least 3.0 ; then + cd "${CTARGET}"/libstdc++-v3 + emake doxygen-man || ewarn "failed to make docs" + fi + else + ewarn "Skipping libstdc++ manpage generation since you don't have doxygen installed" + fi + fi + + popd +} + +# This function will add ${GCC_CONFIG_VER} to the names of all shared libraries in the +# directory specified to avoid filename collisions between multiple slotted +# non-versioned gcc targets. If no directory is specified, it is assumed that +# you want -all- shared objects to have ${GCC_CONFIG_VER} added. Example +# +# add_version_to_shared ${D}/usr/$(get_libdir) +# +# Travis Tilley (05 Sep 2004) +# +add_version_to_shared() { + local sharedlib sharedlibdir + [[ -z $1 ]] \ + && sharedlibdir=${D} \ + || sharedlibdir=$1 + + for sharedlib in $(find ${sharedlibdir} -name *.so.*) ; do + if [[ ! -L ${sharedlib} ]] ; then + einfo "Renaming `basename "${sharedlib}"` to `basename "${sharedlib/.so*/}-${GCC_CONFIG_VER}.so.${sharedlib/*.so./}"`" + mv "${sharedlib}" "${sharedlib/.so*/}-${GCC_CONFIG_VER}.so.${sharedlib/*.so./}" \ + || die + pushd `dirname "${sharedlib}"` > /dev/null || die + ln -sf "`basename "${sharedlib/.so*/}-${GCC_CONFIG_VER}.so.${sharedlib/*.so./}"`" \ + "`basename "${sharedlib}"`" || die + popd > /dev/null || die + fi + done +} + +# This is mostly a stub function to be overwritten in an ebuild +gcc_do_filter_flags() { + strip-flags + + # In general gcc does not like optimization, and add -O2 where + # it is safe. This is especially true for gcc 3.3 + 3.4 + replace-flags -O? -O2 + + # ... sure, why not? 
+ strip-unsupported-flags + + # dont want to funk ourselves + filter-flags '-mabi*' -m31 -m32 -m64 + + case ${GCC_BRANCH_VER} in + 3.2|3.3) + replace-cpu-flags k8 athlon64 opteron i686 x86-64 + replace-cpu-flags pentium-m pentium3m pentium3 + case $(tc-arch) in + amd64|x86) filter-flags '-mtune=*' ;; + # in gcc 3.3 there is a bug on ppc64 where if -mcpu is used, + # the compiler wrongly assumes a 32bit target + ppc64) filter-flags "-mcpu=*";; + esac + case $(tc-arch) in + amd64) replace-cpu-flags core2 nocona;; + x86) replace-cpu-flags core2 prescott;; + esac + + replace-cpu-flags G3 750 + replace-cpu-flags G4 7400 + replace-cpu-flags G5 7400 + + # XXX: should add a sed or something to query all supported flags + # from the gcc source and trim everything else ... + filter-flags -f{no-,}unit-at-a-time -f{no-,}web -mno-tls-direct-seg-refs + filter-flags -f{no-,}stack-protector{,-all} + filter-flags -fvisibility-inlines-hidden -fvisibility=hidden + ;; + 3.4|4.*) + case $(tc-arch) in + x86|amd64) filter-flags '-mcpu=*';; + *-macos) + # http://gcc.gnu.org/bugzilla/show_bug.cgi?id=25127 + [[ ${GCC_BRANCH_VER} == 4.0 || ${GCC_BRANCH_VER} == 4.1 ]] && \ + filter-flags '-mcpu=*' '-march=*' '-mtune=*' + ;; + esac + ;; + esac + + # Compile problems with these (bug #6641 among others)... + #filter-flags "-fno-exceptions -fomit-frame-pointer -fforce-addr" + + # CFLAGS logic (verified with 3.4.3): + # CFLAGS: + # This conflicts when creating a crosscompiler, so set to a sane + # default in this case: + # used in ./configure and elsewhere for the native compiler + # used by gcc when creating libiberty.a + # used by xgcc when creating libstdc++ (and probably others)! + # this behavior should be removed... 
+ # + # CXXFLAGS: + # used by xgcc when creating libstdc++ + # + # STAGE1_CFLAGS (not used in creating a crosscompile gcc): + # used by ${CHOST}-gcc for building stage1 compiler + # + # BOOT_CFLAGS (not used in creating a crosscompile gcc): + # used by xgcc for building stage2/3 compiler + + if is_crosscompile ; then + # Set this to something sane for both native and target + CFLAGS="-O2 -pipe" + + local VAR="CFLAGS_"${CTARGET//-/_} + CXXFLAGS=${!VAR} + fi + + export GCJFLAGS=${GCJFLAGS:-${CFLAGS}} +} + +gcc_src_compile() { + gcc_do_filter_flags + einfo "CFLAGS=\"${CFLAGS}\"" + einfo "CXXFLAGS=\"${CXXFLAGS}\"" + + # For hardened gcc 4.3 piepatchset to build the hardened specs + # file (build.specs) to use when building gcc. + if ! tc_version_is_at_least 4.4 && want_minispecs ; then + setup_minispecs_gcc_build_specs + fi + # Build in a separate build tree + mkdir -p "${WORKDIR}"/build + pushd "${WORKDIR}"/build > /dev/null + + # Install our pre generated manpages if we do not have perl ... + [[ ! -x /usr/bin/perl ]] && [[ -n ${MAN_VER} ]] && \ + unpack gcc-${MAN_VER}-manpages.tar.bz2 + + einfo "Configuring ${PN} ..." + gcc_do_configure + + touch "${S}"/gcc/c-gperf.h + + # Do not make manpages if we do not have perl ... + [[ ! -x /usr/bin/perl ]] \ + && find "${WORKDIR}"/build -name '*.[17]' | xargs touch + + einfo "Compiling ${PN} ..." + gcc_do_make ${GCC_MAKE_TARGET} + + # Do not create multiple specs files for PIE+SSP if boundschecking is in + # USE, as we disable PIE+SSP when it is. + if [[ ${ETYPE} == "gcc-compiler" ]] && want_split_specs && ! 
want_minispecs; then + split_out_specs_files || die "failed to split out specs" + fi + + popd > /dev/null +} + +gcc_src_test() { + cd "${WORKDIR}"/build + emake -j1 -k check || ewarn "check failed and that sucks :(" +} + +gcc-library_src_install() { + # Do the 'make install' from the build directory + cd "${WORKDIR}"/build + S=${WORKDIR}/build \ + emake -j1 \ + DESTDIR="${D}" \ + prefix=${PREFIX} \ + bindir=${BINPATH} \ + includedir=${LIBPATH}/include \ + datadir=${DATAPATH} \ + mandir=${DATAPATH}/man \ + infodir=${DATAPATH}/info \ + LIBPATH="${LIBPATH}" \ + ${GCC_INSTALL_TARGET} || die + + if [[ ${GCC_LIB_COMPAT_ONLY} == "true" ]] ; then + rm -rf "${D}"${INCLUDEPATH} + rm -rf "${D}"${DATAPATH} + pushd "${D}"${LIBPATH}/ + rm *.a *.la *.so + popd + fi + + if [[ -n ${GCC_LIB_USE_SUBDIR} ]] ; then + mkdir -p "${WORKDIR}"/${GCC_LIB_USE_SUBDIR}/ + mv "${D}"${LIBPATH}/* "${WORKDIR}"/${GCC_LIB_USE_SUBDIR}/ + mv "${WORKDIR}"/${GCC_LIB_USE_SUBDIR}/ "${D}"${LIBPATH} + + dodir /etc/env.d + echo "LDPATH=\"${LIBPATH}/${GCC_LIB_USE_SUBDIR}/\"" >> "${D}"/etc/env.d/99${PN} + fi + + if [[ ${GCC_VAR_TYPE} == "non-versioned" ]] ; then + # if we're not using versioned directories, we need to use versioned + # filenames. + add_version_to_shared + fi +} + +gcc-compiler_src_install() { + local x= + + cd "${WORKDIR}"/build + # Do allow symlinks in private gcc include dir as this can break the build + find gcc/include*/ -type l -print0 | xargs -0 rm -f + # Remove generated headers, as they can cause things to break + # (ncurses, openssl, etc). 
+ for x in $(find gcc/include*/ -name '*.h') ; do + grep -q 'It has been auto-edited by fixincludes from' "${x}" \ + && rm -f "${x}" + done + # Do the 'make install' from the build directory + S=${WORKDIR}/build \ + emake -j1 DESTDIR="${D}" install || die + # Punt some tools which are really only useful while building gcc + find "${D}" -name install-tools -prune -type d -exec rm -rf "{}" \; + # This one comes with binutils + find "${D}" -name libiberty.a -exec rm -f "{}" \; + + # Move the libraries to the proper location + gcc_movelibs + + # Basic sanity check + if ! is_crosscompile ; then + local EXEEXT + eval $(grep ^EXEEXT= "${WORKDIR}"/build/gcc/config.log) + [[ -r ${D}${BINPATH}/gcc${EXEEXT} ]] || die "gcc not found in ${D}" + fi + + dodir /etc/env.d/gcc + create_gcc_env_entry + + if want_split_specs ; then + if use hardened ; then + create_gcc_env_entry vanilla + fi + ! use hardened && hardened_gcc_works && create_gcc_env_entry hardened + if hardened_gcc_works || hardened_gcc_works pie ; then + create_gcc_env_entry hardenednossp + fi + if hardened_gcc_works || hardened_gcc_works ssp ; then + create_gcc_env_entry hardenednopie + fi + create_gcc_env_entry hardenednopiessp + + insinto ${LIBPATH} + doins "${WORKDIR}"/build/*.specs || die "failed to install specs" + fi + # Setup the gcc_env_entry for hardened gcc 4 with minispecs + if want_minispecs ; then + copy_minispecs_gcc_specs + fi + # Make sure we dont have stuff lying around that + # can nuke multiple versions of gcc + + gcc_slot_java + + # Move to compiler-specific directories + [[ -f ${D}${STDCXX_INCDIR}/cxxabi.h ]] && \ + mv -f "${D}"${STDCXX_INCDIR}/cxxabi.h "${D}"${LIBPATH}/include/ + + # These should be symlinks + dodir /usr/bin + cd "${D}"${BINPATH} + for x in cpp gcc g++ c++ g77 gcj gcjh gfortran ; do + # For some reason, g77 gets made instead of ${CTARGET}-g77... + # this should take care of that + [[ -f ${x} ]] && mv ${x} ${CTARGET}-${x} + + if [[ -f ${CTARGET}-${x} ]] && ! 
is_crosscompile ; then + ln -sf ${CTARGET}-${x} ${x} + + # Create version-ed symlinks + dosym ${BINPATH}/${CTARGET}-${x} \ + /usr/bin/${CTARGET}-${x}-${GCC_CONFIG_VER} + dosym ${BINPATH}/${CTARGET}-${x} \ + /usr/bin/${x}-${GCC_CONFIG_VER} + fi + + if [[ -f ${CTARGET}-${x}-${GCC_CONFIG_VER} ]] ; then + rm -f ${CTARGET}-${x}-${GCC_CONFIG_VER} + ln -sf ${CTARGET}-${x} ${CTARGET}-${x}-${GCC_CONFIG_VER} + fi + done + + # I do not know if this will break gcj stuff, so I'll only do it for + # objc for now; basically "ffi.h" is the correct file to include, + # but it gets installed in .../GCCVER/include and yet it does + # "#include " which (correctly, as it's an "extra" file) + # is installed in .../GCCVER/include/libffi; the following fixes + # ffi.'s include of ffitarget.h - Armando Di Cianno + if [[ -d ${D}${LIBPATH}/include/libffi ]] ; then + mv -i "${D}"${LIBPATH}/include/libffi/* "${D}"${LIBPATH}/include || die + rm -r "${D}"${LIBPATH}/include/libffi || die + fi + + # Now do the fun stripping stuff + env RESTRICT="" CHOST=${CHOST} prepstrip "${D}${BINPATH}" + env RESTRICT="" CHOST=${CTARGET} prepstrip "${D}${LIBPATH}" + # gcc used to install helper binaries in lib/ but then moved to libexec/ + [[ -d ${D}${PREFIX}/libexec/gcc ]] && \ + env RESTRICT="" CHOST=${CHOST} prepstrip "${D}${PREFIX}/libexec/gcc/${CTARGET}/${GCC_CONFIG_VER}" + + cd "${S}" + if is_crosscompile; then + rm -rf "${D}"/usr/share/{man,info} + rm -rf "${D}"${DATAPATH}/{man,info} + else + local cxx_mandir=${WORKDIR}/build/${CTARGET}/libstdc++-v3/docs/doxygen/man + if [[ -d ${cxx_mandir} ]] ; then + # clean bogus manpages #113902 + find "${cxx_mandir}" -name '*_build_*' -exec rm {} \; + cp -r "${cxx_mandir}"/man? 
"${D}/${DATAPATH}"/man/ + fi + has noinfo ${FEATURES} \ + && rm -r "${D}/${DATAPATH}"/info \ + || prepinfo "${DATAPATH}" + has noman ${FEATURES} \ + && rm -r "${D}/${DATAPATH}"/man \ + || prepman "${DATAPATH}" + fi + # prune empty dirs left behind + for x in 1 2 3 4 ; do + find "${D}" -type d -exec rmdir "{}" \; >& /dev/null + done + + # install testsuite results + if use test; then + docinto testsuite + find "${WORKDIR}"/build -type f -name "*.sum" -print0 | xargs -0 dodoc + find "${WORKDIR}"/build -type f -path "*/testsuite/*.log" -print0 \ + | xargs -0 dodoc + fi + + # Rather install the script, else portage with changing $FILESDIR + # between binary and source package borks things .... + if ! is_crosscompile ; then + insinto "${DATAPATH}" + if tc_version_is_at_least 4.0 ; then + newins "${GCC_FILESDIR}"/awk/fixlafiles.awk-no_gcc_la fixlafiles.awk || die + find "${D}/${LIBPATH}" -name libstdc++.la -type f -exec rm "{}" \; + else + doins "${GCC_FILESDIR}"/awk/fixlafiles.awk || die + fi + exeinto "${DATAPATH}" + doexe "${GCC_FILESDIR}"/fix_libtool_files.sh || die + doexe "${GCC_FILESDIR}"/c{89,99} || die + fi + + # use gid of 0 because some stupid ports don't have + # the group 'root' set to gid 0 + chown -R root:0 "${D}"${LIBPATH} + + # Move pretty-printers to gdb datadir to shut ldconfig up + gdbdir=/usr/share/gdb/auto-load + for module in $(find "${D}" -iname "*-gdb.py" -print); do + insinto ${gdbdir}/$(dirname "${module/${D}/}" | \ + sed -e "s:/lib/:/$(get_libdir)/:g") + doins "${module}" + rm "${module}" + done +} + +gcc_slot_java() { + local x + + # Move Java headers to compiler-specific dir + for x in "${D}"${PREFIX}/include/gc*.h "${D}"${PREFIX}/include/j*.h ; do + [[ -f ${x} ]] && mv -f "${x}" "${D}"${LIBPATH}/include/ + done + for x in gcj gnu java javax org ; do + if [[ -d ${D}${PREFIX}/include/${x} ]] ; then + dodir /${LIBPATH}/include/${x} + mv -f "${D}"${PREFIX}/include/${x}/* "${D}"${LIBPATH}/include/${x}/ + rm -rf "${D}"${PREFIX}/include/${x} + fi 
+ done + + if [[ -d ${D}${PREFIX}/lib/security ]] || [[ -d ${D}${PREFIX}/$(get_libdir)/security ]] ; then + dodir /${LIBPATH}/security + mv -f "${D}"${PREFIX}/lib*/security/* "${D}"${LIBPATH}/security + rm -rf "${D}"${PREFIX}/lib*/security + fi + + # Move libgcj.spec to compiler-specific directories + [[ -f ${D}${PREFIX}/lib/libgcj.spec ]] && \ + mv -f "${D}"${PREFIX}/lib/libgcj.spec "${D}"${LIBPATH}/libgcj.spec + + # SLOT up libgcj.pc (and let gcc-config worry about links) + local libgcj + for libgcj in "${D}${PREFIX}"/lib/pkgconfig/libgcj*.pc ; do + if [[ -f ${libgcj} ]] ; then + sed -i "/^libdir=/s:=.*:=${LIBPATH}:" "${libgcj}" + if ! is_crosscompile; then + mv "${libgcj}" "${D}"/${PREFIX}/lib/pkgconfig/libgcj-${GCC_PV}.pc || die + else + dodir /${PREFIX}/${CTARGET}/${PREFIX}/lib/pkgconfig + mv "${libgcj}" "${D}"/${PREFIX}/${CTARGET}/${PREFIX}/lib/pkgconfig/libgcj-${GCC_PV}.pc || die + fi + fi + done + + # Move JNI native (CTARGET) java libraries + if is_crosscompile; then + dodir /${PREFIX}/${CTARGET}/${PREFIX}/lib + find "${D}"/${PREFIX}/lib -name 'gcj-*' -type d -exec \ + mv {} "${D}"/${PREFIX}/${CTARGET}/${PREFIX}/lib/ \; + fi + + # Rename jar because it could clash with Kaffe's jar if this gcc is + # primary compiler (aka don't have the - extension) + cd "${D}"${BINPATH} + [[ -f jar ]] && mv -f jar gcj-jar +} + +# Move around the libs to the right location. 
For some reason,
# when installing gcc, it dumps internal libraries into /usr/lib
# instead of the private gcc lib path
gcc_movelibs() {
	# Relocate multilib variants of gcc's internal libraries from the
	# generic install locations into ${LIBPATH}/<multidir> so each slotted
	# gcc keeps its own private copies.
	# older versions of gcc did not support --print-multi-os-directory
	tc_version_is_at_least 3.0 || return 0

	local multiarg removedirs=""
	# One iteration per multilib variant reported by the just-built compiler.
	# Output format is "<dir>;<flags-with-@-separators>"; we keep only the
	# flags part and turn '@' back into ' -' to get real compiler options.
	for multiarg in $($(XGCC) -print-multi-lib) ; do
		multiarg=${multiarg#*;}
		multiarg=${multiarg//@/ -}

		local OS_MULTIDIR=$($(XGCC) ${multiarg} --print-multi-os-directory)
		local MULTIDIR=$($(XGCC) ${multiarg} --print-multi-directory)
		local TODIR=${D}${LIBPATH}/${MULTIDIR}
		local FROMDIR=

		[[ -d ${TODIR} ]] || mkdir -p ${TODIR}

		# Candidate directories the install step may have dumped libs into.
		for FROMDIR in \
			${LIBPATH}/${OS_MULTIDIR} \
			${LIBPATH}/../${MULTIDIR} \
			${PREFIX}/lib/${OS_MULTIDIR} \
			${PREFIX}/${CTARGET}/lib/${OS_MULTIDIR} \
			${PREFIX}/lib/${MULTIDIR}
		do
			removedirs="${removedirs} ${FROMDIR}"
			FROMDIR=${D}${FROMDIR}
			if [[ ${FROMDIR} != "${TODIR}" && -d ${FROMDIR} ]] ; then
				# Only move plain files at the top level, not subdirectories.
				local files=$(find "${FROMDIR}" -maxdepth 1 ! -type d 2>/dev/null)
				if [[ -n ${files} ]] ; then
					mv ${files} "${TODIR}"
				fi
			fi
		done
		fix_libtool_libdir_paths "${LIBPATH}/${MULTIDIR}"
	done

	# We remove directories separately to avoid this case:
	#	mv SRC/lib/../lib/*.o DEST
	#	rmdir SRC/lib/../lib/
	#	mv SRC/lib/../lib32/*.o DEST  # Bork
	for FROMDIR in ${removedirs} ; do
		rmdir "${D}"${FROMDIR} >& /dev/null
	done
	# Prune any now-empty directories left in the image; errors are ignored
	# since most directories are legitimately non-empty.
	find "${D}" -type d | xargs rmdir >& /dev/null
}

#----<< src_* >>----

#---->> unorganized crap in need of refactoring follows

# gcc_quick_unpack will unpack the gcc tarball and patches in a way that is
# consistent with the behavior of get_gcc_src_uri. The only patch it applies
# itself is the branch update if present.
#
# Travis Tilley (03 Sep 2004)
#
gcc_quick_unpack() {
	# Unpack the gcc tarball (release, prerelease, snapshot, or the faked
	# GCC_A_FAKEIT set) plus every optional patchset selected by the
	# various *_VER variables, mirroring what get_gcc_src_uri advertises.
	pushd "${WORKDIR}" > /dev/null
	# Default each patchset's gcc version to the release version so the
	# tarball names below resolve consistently.
	export PATCH_GCC_VER=${PATCH_GCC_VER:-${GCC_RELEASE_VER}}
	export UCLIBC_GCC_VER=${UCLIBC_GCC_VER:-${PATCH_GCC_VER}}
	export PIE_GCC_VER=${PIE_GCC_VER:-${GCC_RELEASE_VER}}
	export PP_GCC_VER=${PP_GCC_VER:-${GCC_RELEASE_VER}}
	export HTB_GCC_VER=${HTB_GCC_VER:-${GCC_RELEASE_VER}}
	export SPECS_GCC_VER=${SPECS_GCC_VER:-${GCC_RELEASE_VER}}

	if [[ -n ${GCC_A_FAKEIT} ]] ; then
		unpack ${GCC_A_FAKEIT}
	elif [[ -n ${PRERELEASE} ]] ; then
		unpack gcc-${PRERELEASE}.tar.bz2
	elif [[ -n ${SNAPSHOT} ]] ; then
		unpack gcc-${SNAPSHOT}.tar.bz2
	else
		unpack gcc-${GCC_RELEASE_VER}.tar.bz2
		# We want branch updates to be against a release tarball
		if [[ -n ${BRANCH_UPDATE} ]] ; then
			pushd "${S}" > /dev/null
			epatch "${DISTDIR}"/gcc-${GCC_RELEASE_VER}-branch-update-${BRANCH_UPDATE}.patch.bz2
			popd > /dev/null
		fi
	fi

	# Optional GDC (D language) frontend: unpack into gcc/ and run its
	# setup script, logging to ${T}/dgcc.log for bug reports on failure.
	if [[ -n ${D_VER} ]] && use d ; then
		pushd "${S}"/gcc > /dev/null
		unpack gdc-${D_VER}-src.tar.bz2
		cd ..
		ebegin "Adding support for the D language"
		./gcc/d/setup-gcc.sh >& "${T}"/dgcc.log
		if ! eend $? ; then
			eerror "The D gcc package failed to apply"
			eerror "Please include this log file when posting a bug report:"
			eerror " ${T}/dgcc.log"
			die "failed to include the D language"
		fi
		popd > /dev/null
	fi

	[[ -n ${PATCH_VER} ]] && \
		unpack gcc-${PATCH_GCC_VER}-patches-${PATCH_VER}.tar.bz2

	[[ -n ${UCLIBC_VER} ]] && \
		unpack gcc-${UCLIBC_GCC_VER}-uclibc-patches-${UCLIBC_VER}.tar.bz2

	if want_ssp ; then
		if [[ -n ${PP_FVER} ]] ; then
			# The gcc 3.4 propolice versions are meant to be unpacked to ${S}
			pushd "${S}" > /dev/null
			unpack protector-${PP_FVER}.tar.gz
			popd > /dev/null
		else
			unpack gcc-${PP_GCC_VER}-ssp-${PP_VER}.tar.bz2
		fi
	fi

	if want_pie ; then
		if [[ -n ${PIE_CORE} ]] ; then
			unpack ${PIE_CORE}
		else
			unpack gcc-${PIE_GCC_VER}-piepatches-v${PIE_VER}.tar.bz2
		fi
		[[ -n ${SPECS_VER} ]] && \
			unpack gcc-${SPECS_GCC_VER}-specs-${SPECS_VER}.tar.bz2
	fi

	want_boundschecking && \
		unpack "bounds-checking-gcc-${HTB_GCC_VER}-${HTB_VER}.patch.bz2"

	popd > /dev/null
}

# Exclude any unwanted patches, as specified by the following variables:
#
#	GENTOO_PATCH_EXCLUDE
#		List of filenames, relative to ${WORKDIR}/patch/
#
#	PIEPATCH_EXCLUDE
#		List of filenames, relative to ${WORKDIR}/piepatch/
#
# Travis Tilley (03 Sep 2004)
#
exclude_gcc_patches() {
	local i
	for i in ${GENTOO_PATCH_EXCLUDE} ; do
		if [[ -f ${WORKDIR}/patch/${i} ]] ; then
			einfo "Excluding patch ${i}"
			rm -f "${WORKDIR}"/patch/${i} || die "failed to delete ${i}"
		fi
	done
	for i in ${PIEPATCH_EXCLUDE} ; do
		if [[ -f ${WORKDIR}/piepatch/${i} ]] ; then
			einfo "Excluding piepatch ${i}"
			rm -f "${WORKDIR}"/piepatch/${i} || die "failed to delete ${i}"
		fi
	done
}

# Try to apply some stub patches so that gcc won't error out when
# passed parameters like -fstack-protector but no ssp is found
do_gcc_stub() {
	# $1 - stub name ("ssp" or "htb"); the first matching stub patch for
	# the release or branch version is applied (unless USE=vanilla).
	local v stub_patch=""
	for v in ${GCC_RELEASE_VER} ${GCC_BRANCH_VER} ; do
		stub_patch=${GCC_FILESDIR}/stubs/gcc-${v}-$1-stub.patch
		if [[ -e ${stub_patch} ]] && ! use vanilla ; then
			EPATCH_SINGLE_MSG="Applying stub patch for $1 ..." \
			epatch "${stub_patch}"
			return 0
		fi
	done
}

# Apply the HTB (bounds-checking) patchset, or just the htb stub patch when
# boundschecking is not wanted (or is excluded by an exclusive ssp setup).
do_gcc_HTB_patches() {
	if ! want_boundschecking || \
	   (want_ssp && [[ ${HTB_EXCLUSIVE} == "true" ]])
	then
		do_gcc_stub htb
		return 0
	fi

	# modify the bounds checking patch with a regression patch
	epatch "${WORKDIR}/bounds-checking-gcc-${HTB_GCC_VER}-${HTB_VER}.patch"
	BRANDING_GCC_PKGVERSION="${BRANDING_GCC_PKGVERSION}, HTB-${HTB_GCC_VER}-${HTB_VER}"
}

# patch in ProPolice Stack Smashing protection
do_gcc_SSP_patches() {
	# PARISC has no love ... it's our stack :(
	if [[ $(tc-arch) == "hppa" ]] || \
	   ! want_ssp || \
	   (want_boundschecking && [[ ${HTB_EXCLUSIVE} == "true" ]])
	then
		do_gcc_stub ssp
		return 0
	fi

	local ssppatch
	local sspdocs

	if [[ -n ${PP_FVER} ]] ; then
		# Etoh keeps changing where files are and what the patch is named
		if tc_version_is_at_least 3.4.1 ; then
			# >3.4.1 uses version in patch name, and also includes docs
			ssppatch="${S}/gcc_${PP_VER}.dif"
			sspdocs="yes"
		elif tc_version_is_at_least 3.4.0 ; then
			# >3.4 put files where they belong and 3_4 uses old patch name
			ssppatch="${S}/protector.dif"
			sspdocs="no"
		elif tc_version_is_at_least 3.2.3 ; then
			# earlier versions have no directory structure or docs
			mv "${S}"/protector.{c,h} "${S}"/gcc
			ssppatch="${S}/protector.dif"
			sspdocs="no"
		fi
	else
		# Just start packaging the damn thing ourselves
		mv "${WORKDIR}"/ssp/protector.{c,h} "${S}"/gcc/
		ssppatch=${WORKDIR}/ssp/gcc-${PP_GCC_VER}-ssp.patch
		# allow boundschecking and ssp to get along
		(want_boundschecking && [[ -e ${WORKDIR}/ssp/htb-ssp.patch ]]) \
			&& patch -s "${ssppatch}" "${WORKDIR}"/ssp/htb-ssp.patch
	fi

	[[ -z ${ssppatch} ]] && die "Sorry, SSP is not supported in this version"
	epatch ${ssppatch}

	if [[ ${PN} == "gcc" && ${sspdocs} == "no" ]] ; then
		epatch "${GCC_FILESDIR}"/pro-police-docs.patch
	fi

	# Don't build crtbegin/end with ssp
	sed -e 's|^CRTSTUFF_CFLAGS = |CRTSTUFF_CFLAGS = -fno-stack-protector |'\
		-i gcc/Makefile.in || die "Failed to update crtstuff!"

	# if gcc in a stage3 defaults to ssp, is version 3.4.0 and a stage1 is built
	# the build fails building timevar.o w/:
	# cc1: stack smashing attack in function ix86_split_to_parts()
	if use build && tc_version_is_at_least 3.4.0 ; then
		if gcc -dumpspecs | grep -q "fno-stack-protector:" ; then
			epatch "${GCC_FILESDIR}"/3.4.0/gcc-3.4.0-cc1-no-stack-protector.patch
		fi
	fi

	BRANDING_GCC_PKGVERSION="${BRANDING_GCC_PKGVERSION}, ssp-${PP_FVER:-${PP_GCC_VER}-${PP_VER}}"
	if want_libssp ; then
		update_gcc_for_libssp
	else
		update_gcc_for_libc_ssp
	fi

	# Don't build libgcc with ssp
	sed -e 's|^\(LIBGCC2_CFLAGS.*\)$|\1 -fno-stack-protector|' \
		-i gcc/Makefile.in || die "Failed to update gcc!"
}

# If glibc or uclibc has been patched to provide the necessary symbols itself,
# then let's use those for SSP instead of libgcc.
update_gcc_for_libc_ssp() {
	if libc_has_ssp ; then
		einfo "Updating gcc to use SSP from libc ..."
		sed -e 's|^\(LIBGCC2_CFLAGS.*\)$|\1 -D_LIBC_PROVIDES_SSP_|' \
			-i "${S}"/gcc/Makefile.in || die "Failed to update gcc!"
	fi
}

# a split out non-libc non-libgcc ssp requires additional spec logic changes
update_gcc_for_libssp() {
	einfo "Updating gcc to use SSP from libssp..."
	sed -e 's|^\(INTERNAL_CFLAGS.*\)$|\1 -D_LIBSSP_PROVIDES_SSP_|' \
		-i "${S}"/gcc/Makefile.in || die "Failed to update gcc!"
}

# do various updates to PIE logic
do_gcc_PIE_patches() {
	if ! want_pie || \
	   (want_boundschecking && [[ ${HTB_EXCLUSIVE} == "true" ]])
	then
		return 0
	fi

	# Drop whichever boundschecking variant of the pie patches does not
	# apply to this build.
	want_boundschecking \
		&& rm -f "${WORKDIR}"/piepatch/*/*-boundschecking-no.patch* \
		|| rm -f "${WORKDIR}"/piepatch/*/*-boundschecking-yes.patch*

	use vanilla && return 0

	if tc_version_is_at_least 4.3.2; then
		guess_patch_type_in_dir "${WORKDIR}"/piepatch/
		EPATCH_MULTI_MSG="Applying pie patches ..." \
		epatch "${WORKDIR}"/piepatch/
	else
		guess_patch_type_in_dir "${WORKDIR}"/piepatch/upstream

		# corrects startfile/endfile selection and shared/static/pie flag usage
		EPATCH_MULTI_MSG="Applying upstream pie patches ..." \
		epatch "${WORKDIR}"/piepatch/upstream
		# adds non-default pie support (rs6000)
		EPATCH_MULTI_MSG="Applying non-default pie patches ..." \
		epatch "${WORKDIR}"/piepatch/nondef
		# adds default pie support (rs6000 too) if DEFAULT_PIE[_SSP] is defined
		EPATCH_MULTI_MSG="Applying default pie patches ..." \
		epatch "${WORKDIR}"/piepatch/def
	fi
	# we want to be able to control the pie patch logic via something other
	# than ALL_CFLAGS...
	sed -e '/^ALL_CFLAGS/iHARD_CFLAGS = ' \
		-e 's|^ALL_CFLAGS = |ALL_CFLAGS = $(HARD_CFLAGS) |' \
		-i "${S}"/gcc/Makefile.in

	BRANDING_GCC_PKGVERSION="${BRANDING_GCC_PKGVERSION}, pie-${PIE_VER}"
}

# Decide whether gcc-config should be switched to the newly installed
# compiler. Returns 0 (yes) when bootstrapping, when the current config is
# invalid/same-branch, or (with USE=multislot) when reinstalling the active
# profile; returns 1 and prints switching instructions otherwise.
should_we_gcc_config() {
	# we always want to run gcc-config if we're bootstrapping, otherwise
	# we might get stuck with the c-only stage1 compiler
	use bootstrap && return 0
	use build && return 0

	# if the current config is invalid, we definitely want a new one
	# Note: due to bash quirkiness, the following must not be 1 line
	local curr_config
	curr_config=$(env -i ROOT="${ROOT}" gcc-config -c ${CTARGET} 2>&1) || return 0

	# if the previously selected config has the same major.minor (branch) as
	# the version we are installing, then it will probably be uninstalled
	# for being in the same SLOT, make sure we run gcc-config.
	local curr_config_ver=$(env -i ROOT="${ROOT}" gcc-config -S ${curr_config} | awk '{print $2}')

	local curr_branch_ver=$(get_version_component_range 1-2 ${curr_config_ver})

	# If we're using multislot, just run gcc-config if we're installing
	# to the same profile as the current one.
	# NOTE(review): relies on $(...) expanding to nothing so that 'return'
	# propagates the exit status of the [[ ]] comparison — confirm intended.
	use multislot && return $([[ ${curr_config_ver} == ${GCC_CONFIG_VER} ]])

	if [[ ${curr_branch_ver} == ${GCC_BRANCH_VER} ]] ; then
		return 0
	else
		# if we're installing a genuinely different compiler version,
		# we should probably tell the user -how- to switch to the new
		# gcc version, since we're not going to do it for him/her.
		# We don't want to switch from say gcc-3.3 to gcc-3.4 right in
		# the middle of an emerge operation (like an 'emerge -e world'
		# which could install multiple gcc versions).
		einfo "The current gcc config appears valid, so it will not be"
		einfo "automatically switched for you. If you would like to"
		einfo "switch to the newly installed gcc version, do the"
		einfo "following:"
		echo
		einfo "gcc-config ${CTARGET}-${GCC_CONFIG_VER}"
		einfo "source /etc/profile"
		echo
		ebeep
		return 1
	fi
}

# Run gcc-config: either re-select the old config (when should_we_gcc_config
# says no) or select the new ${GCC_CONFIG_VER}, preserving any active
# specs-specific variant when its config file still exists.
do_gcc_config() {
	if ! should_we_gcc_config ; then
		env -i ROOT="${ROOT}" gcc-config --use-old --force
		return 0
	fi

	local current_gcc_config="" current_specs="" use_specs=""

	current_gcc_config=$(env -i ROOT="${ROOT}" gcc-config -c ${CTARGET} 2>/dev/null)
	if [[ -n ${current_gcc_config} ]] ; then
		# figure out which specs-specific config is active
		current_specs=$(gcc-config -S ${current_gcc_config} | awk '{print $3}')
		[[ -n ${current_specs} ]] && use_specs=-${current_specs}
	fi
	if [[ -n ${use_specs} ]] && \
	   [[ ! -e ${ROOT}/etc/env.d/gcc/${CTARGET}-${GCC_CONFIG_VER}${use_specs} ]]
	then
		ewarn "The currently selected specs-specific gcc config,"
		ewarn "${current_specs}, doesn't exist anymore. This is usually"
		ewarn "due to enabling/disabling hardened or switching to a version"
		ewarn "of gcc that doesnt create multiple specs files. The default"
		ewarn "config will be used, and the previous preference forgotten."
		ebeep
		epause
		use_specs=""
	fi

	gcc-config ${CTARGET}-${GCC_CONFIG_VER}${use_specs}
}

# This function allows us to gentoo-ize gcc's version number and bugzilla
# URL without needing to use patches.
gcc_version_patch() {
	# gcc-4.3+ has configure flags (whoo!)
	tc_version_is_at_least 4.3 && return 0

	local version_string=${GCC_CONFIG_VER}
	[[ -n ${BRANCH_UPDATE} ]] && version_string="${version_string} ${BRANCH_UPDATE}"

	einfo "patching gcc version: ${version_string} (${BRANDING_GCC_PKGVERSION})"

	# Newer version.c files have a VERSUFFIX macro we can brand directly;
	# otherwise rewrite the whole version_string literal.
	if grep -qs VERSUFFIX "${S}"/gcc/version.c ; then
		sed -i -e "s~VERSUFFIX \"\"~VERSUFFIX \" (${BRANDING_GCC_PKGVERSION})\"~" \
			"${S}"/gcc/version.c || die "failed to update VERSUFFIX with Gentoo branding"
	else
		version_string="${version_string} (${BRANDING_GCC_PKGVERSION})"
		sed -i -e "s~\(const char version_string\[\] = \"\).*\(\".*\)~\1$version_string\2~" \
			"${S}"/gcc/version.c || die "failed to update version.c with Gentoo branding."
	fi
	# Point bug reports at Gentoo's bugzilla instead of upstream's.
	sed -i -e 's~gcc\.gnu\.org\/bugs\.html~bugs\.gentoo\.org\/~' \
		"${S}"/gcc/version.c || die "Failed to change the bug URL"
}

# The purpose of this DISGUSTING gcc multilib hack is to allow 64bit libs
# to live in lib instead of lib64 where they belong, with 32bit libraries
# in lib32. This hack has been around since the beginning of the amd64 port,
# and we're only now starting to fix everything that's broken. Eventually
# this should go away.
+# +# Travis Tilley (03 Sep 2004) +# +disgusting_gcc_multilib_HACK() { + local config + local libdirs + if has_multilib_profile ; then + case $(tc-arch) in + amd64) + config="i386/t-linux64" + libdirs="../$(get_abi_LIBDIR amd64) ../$(get_abi_LIBDIR x86)" \ + ;; + ppc64) + config="rs6000/t-linux64" + libdirs="../$(get_abi_LIBDIR ppc64) ../$(get_abi_LIBDIR ppc)" \ + ;; + esac + else + die "Your profile is no longer supported by portage." + fi + + einfo "updating multilib directories to be: ${libdirs}" + sed -i -e "s:^MULTILIB_OSDIRNAMES.*:MULTILIB_OSDIRNAMES = ${libdirs}:" "${S}"/gcc/config/${config} +} + +disable_multilib_libjava() { + if is_gcj ; then + # We don't want a multilib libjava, so let's use this hack taken from fedora + pushd "${S}" > /dev/null + sed -i -e 's/^all: all-redirect/ifeq (\$(MULTISUBDIR),)\nall: all-redirect\nelse\nall:\n\techo Multilib libjava build disabled\nendif/' libjava/Makefile.in + sed -i -e 's/^install: install-redirect/ifeq (\$(MULTISUBDIR),)\ninstall: install-redirect\nelse\ninstall:\n\techo Multilib libjava install disabled\nendif/' libjava/Makefile.in + sed -i -e 's/^check: check-redirect/ifeq (\$(MULTISUBDIR),)\ncheck: check-redirect\nelse\ncheck:\n\techo Multilib libjava check disabled\nendif/' libjava/Makefile.in + sed -i -e 's/^all: all-recursive/ifeq (\$(MULTISUBDIR),)\nall: all-recursive\nelse\nall:\n\techo Multilib libjava build disabled\nendif/' libjava/Makefile.in + sed -i -e 's/^install: install-recursive/ifeq (\$(MULTISUBDIR),)\ninstall: install-recursive\nelse\ninstall:\n\techo Multilib libjava install disabled\nendif/' libjava/Makefile.in + sed -i -e 's/^check: check-recursive/ifeq (\$(MULTISUBDIR),)\ncheck: check-recursive\nelse\ncheck:\n\techo Multilib libjava check disabled\nendif/' libjava/Makefile.in + popd > /dev/null + fi +} + +# make sure the libtool archives have libdir set to where they actually +# -are-, and not where they -used- to be. 
also, any dependencies we have +# on our own .la files need to be updated. +fix_libtool_libdir_paths() { + pushd "${D}" >/dev/null + + pushd "./${1}" >/dev/null + local dir="${PWD#${D%/}}" + local allarchives=$(echo *.la) + allarchives="\(${allarchives// /\\|}\)" + popd >/dev/null + + sed -i \ + -e "/^libdir=/s:=.*:='${dir}':" \ + ./${dir}/*.la + sed -i \ + -e "/^dependency_libs=/s:/[^ ]*/${allarchives}:${LIBPATH}/\1:g" \ + $(find ./${PREFIX}/lib* -maxdepth 3 -name '*.la') \ + ./${dir}/*.la + + popd >/dev/null +} + +is_multilib() { + [[ ${GCCMAJOR} < 3 ]] && return 1 + case ${CTARGET} in + mips64*|powerpc64*|s390x*|sparc*|x86_64*) + has_multilib_profile || use multilib ;; + *-*-solaris*|*-apple-darwin*|*-mint*) + use multilib ;; + *) false ;; + esac +} + +is_cxx() { + gcc-lang-supported 'c++' || return 1 + ! use nocxx +} + +is_d() { + gcc-lang-supported d || return 1 + use d +} + +is_f77() { + gcc-lang-supported f77 || return 1 + use fortran +} + +is_f95() { + gcc-lang-supported f95 || return 1 + use fortran +} + +is_fortran() { + gcc-lang-supported fortran || return 1 + use fortran +} + +is_gcj() { + gcc-lang-supported java || return 1 + use gcj +} + +is_libffi() { + has libffi ${IUSE} || return 1 + use libffi +} + +is_objc() { + gcc-lang-supported objc || return 1 + use objc +} + +is_objcxx() { + gcc-lang-supported 'obj-c++' || return 1 + use objc++ +} + +is_ada() { + gcc-lang-supported ada || return 1 + use ada +} + +is_treelang() { + has boundschecking ${IUSE} && use boundschecking && return 1 #260532 + is_crosscompile && return 1 #199924 + gcc-lang-supported treelang || return 1 + #use treelang + return 0 +} diff --git a/eclass/x-modular.eclass b/eclass/x-modular.eclass new file mode 100644 index 0000000..1c47174 --- /dev/null +++ b/eclass/x-modular.eclass @@ -0,0 +1,619 @@ +# Copyright 1999-2005 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 +# $Header: /var/cvsroot/gentoo-x86/eclass/x-modular.eclass,v 1.119 2010/07/04 
20:42:22 dirtyepic Exp $ +# +# @ECLASS: x-modular.eclass +# @MAINTAINER: +# Donnie Berkholz , x11@gentoo.org +# @BLURB: Reduces code duplication in the modularized X11 ebuilds. +# @DESCRIPTION: +# This eclass makes trivial X ebuilds possible for apps, fonts, drivers, +# and more. Many things that would normally be done in various functions +# can be accessed by setting variables instead, such as patching, +# running eautoreconf, passing options to configure and installing docs. +# +# All you need to do in a basic ebuild is inherit this eclass and set +# DESCRIPTION, KEYWORDS and RDEPEND/DEPEND. If your package is hosted +# with the other X packages, you don't need to set SRC_URI. Pretty much +# everything else should be automatic. + +if [[ ${PV} = 9999* ]]; then + GIT_ECLASS="git" + SNAPSHOT="yes" + SRC_URI="" +fi + +MULTILIB_EXT_SOURCE_BUILD=yes + +# If we're a font package, but not the font.alias one +FONT_ECLASS="" +if [[ "${PN/#font-}" != "${PN}" ]] \ + && [[ "${CATEGORY}" = "media-fonts" ]] \ + && [[ "${PN}" != "font-alias" ]] \ + && [[ "${PN}" != "font-util" ]]; then + # Activate font code in the rest of the eclass + FONT="yes" + + # Whether to inherit the font eclass + FONT_ECLASS="font" +fi + +inherit eutils libtool multilib toolchain-funcs flag-o-matic autotools \ + ${FONT_ECLASS} ${GIT_ECLASS} + +EXPORTED_FUNCTIONS="src_unpack src_compile src_install pkg_preinst pkg_postinst pkg_postrm" + +case "${EAPI:-0}" in + 0|1) + ;; + 2) + EXPORTED_FUNCTIONS="${EXPORTED_FUNCTIONS} src_prepare src_configure" + ;; + *) + die "Unknown EAPI ${EAPI}" + ;; +esac + +# exports must be ALWAYS after inherit +EXPORT_FUNCTIONS ${EXPORTED_FUNCTIONS} + +# @ECLASS-VARIABLE: XDIR +# @DESCRIPTION: +# Directory prefix to use for everything. If you want to install to a +# non-default prefix (e.g., /opt/xorg), change XDIR. This has not been +# recently tested. 
You may need to uncomment the setting of datadir and +# mandir in x-modular_src_install() or add it back in if it's no longer +# there. You may also want to change the SLOT. +XDIR="/usr" + +IUSE="" +HOMEPAGE="http://xorg.freedesktop.org/" + +# @ECLASS-VARIABLE: SNAPSHOT +# @DESCRIPTION: +# If set to 'yes' and configure.ac exists, eautoreconf will run. Set +# before inheriting this eclass. +: ${SNAPSHOT:=no} + +# Set up SRC_URI for individual modular releases +BASE_INDIVIDUAL_URI="http://xorg.freedesktop.org/releases/individual" +# @ECLASS-VARIABLE: MODULE +# @DESCRIPTION: +# The subdirectory to download source from. Possible settings are app, +# doc, data, util, driver, font, lib, proto, xserver. Set above the +# inherit to override the default autoconfigured module. +if [[ -z ${MODULE} ]]; then + case ${CATEGORY} in + app-doc) MODULE="doc" ;; + media-fonts) MODULE="font" ;; + x11-apps|x11-wm) MODULE="app" ;; + x11-misc|x11-themes) MODULE="util" ;; + x11-drivers) MODULE="driver" ;; + x11-base) MODULE="xserver" ;; + x11-proto) MODULE="proto" ;; + x11-libs) MODULE="lib" ;; + esac +fi + +if [[ -n ${GIT_ECLASS} ]]; then + EGIT_REPO_URI="git://anongit.freedesktop.org/git/xorg/${MODULE}/${PN}" +else + SRC_URI="${SRC_URI} ${BASE_INDIVIDUAL_URI}/${MODULE}/${P}.tar.bz2" +fi + +SLOT="0" + +# Set the license for the package. This can be overridden by setting +# LICENSE after the inherit. Nearly all FreeDesktop-hosted X packages +# are under the MIT license. (This is what Red Hat does in their rpms) +LICENSE="MIT" + +# Set up shared dependencies +if [[ -n "${SNAPSHOT}" ]]; then +# FIXME: What's the minimal libtool version supporting arbitrary versioning? + DEPEND="${DEPEND} + >=sys-devel/libtool-1.5[lib32?] 
+ >=sys-devel/m4-1.4" + WANT_AUTOCONF="latest" + WANT_AUTOMAKE="latest" +fi + +if [[ -n "${FONT}" ]]; then + RDEPEND="${RDEPEND} + media-fonts/encodings + x11-apps/mkfontscale + x11-apps/mkfontdir" + PDEPEND="${PDEPEND} + media-fonts/font-alias" + + # Starting with 7.0RC3, we can specify the font directory + # But oddly, we can't do the same for encodings or font-alias + +# @ECLASS-VARIABLE: FONT_DIR +# @DESCRIPTION: +# If you're creating a font package and the suffix of PN is not equal to +# the subdirectory of /usr/share/fonts/ it should install into, set +# FONT_DIR to that directory or directories. Set before inheriting this +# eclass. + : ${FONT_DIR:=${PN##*-}} + + # Fix case of font directories + FONT_DIR=${FONT_DIR/ttf/TTF} + FONT_DIR=${FONT_DIR/otf/OTF} + FONT_DIR=${FONT_DIR/type1/Type1} + FONT_DIR=${FONT_DIR/speedo/Speedo} + + # Set up configure options, wrapped so ebuilds can override if need be + if [[ -z ${FONT_OPTIONS} ]]; then + FONT_OPTIONS="--with-fontdir=\"/usr/share/fonts/${FONT_DIR}\"" + fi + + if [[ -n "${FONT}" ]]; then + if [[ ${PN##*-} = misc ]] || [[ ${PN##*-} = 75dpi ]] || [[ ${PN##*-} = 100dpi ]] || [[ ${PN##*-} = cyrillic ]]; then + IUSE="${IUSE} nls" + fi + fi +fi + +# If we're a driver package +if [[ "${PN/#xf86-video}" != "${PN}" ]] || [[ "${PN/#xf86-input}" != "${PN}" ]]; then + # Enable driver code in the rest of the eclass + DRIVER="yes" +fi + +# Debugging -- ignore packages that can't be built with debugging +if [[ -z "${FONT}" ]] \ + && [[ "${CATEGORY/app-doc}" = "${CATEGORY}" ]] \ + && [[ "${CATEGORY/x11-proto}" = "${CATEGORY}" ]] \ + && [[ "${PN/util-macros}" = "${PN}" ]] \ + && [[ "${PN/xbitmaps}" = "${PN}" ]] \ + && [[ "${PN/xkbdata}" = "${PN}" ]] \ + && [[ "${PN/xorg-cf-files}" = "${PN}" ]] \ + && [[ "${PN/xcursor}" = "${PN}" ]] \ + ; then + DEBUGGABLE="yes" + IUSE="${IUSE} debug" +fi + +DEPEND="${DEPEND} + >=dev-util/pkgconfig-0.18[lib32?]" + +if [[ "${PN/util-macros}" = "${PN}" ]]; then + DEPEND="${DEPEND} + 
>=x11-misc/util-macros-1.3.0 + sys-devel/binutils" +fi + +RDEPEND="${RDEPEND} + !<=x11-base/xorg-x11-6.9" +# Provides virtual/x11 for temporary use until packages are ported +# x11-base/x11-env" + +# @FUNCTION: x-modular_specs_check +# @USAGE: +# @DESCRIPTION: +# Make any necessary changes related to gcc specs (generally hardened) +x-modular_specs_check() { + if [[ ${PN:0:11} = "xorg-server" ]] || [[ -n "${DRIVER}" ]]; then + append-ldflags -Wl,-z,lazy + # (#116698) breaks loading + filter-ldflags -Wl,-z,now + fi +} + +# @FUNCTION: x-modular_dri_check +# @USAGE: +# @DESCRIPTION: +# Ensures the server supports DRI if building a driver with DRI support +x-modular_dri_check() { + # (#120057) Enabling DRI in drivers requires that the server was built with + # support for it + # Starting with xorg-server 1.5.3, DRI support is always enabled unless + # USE=minimal is set (see bug #252084) + if [[ -n "${DRIVER}" ]]; then + if has dri ${IUSE} && use dri; then + einfo "Checking for direct rendering capabilities ..." + if has_version '>=x11-base/xorg-server-1.5.3'; then + if built_with_use x11-base/xorg-server minimal; then + die "You must build x11-base/xorg-server with USE=-minimal." + fi + else + if ! built_with_use x11-base/xorg-server dri; then + die "You must build x11-base/xorg-server with USE=dri." + fi + fi + fi + fi +} + +# @FUNCTION: x-modular_server_supports_drivers_check +# @USAGE: +# @DESCRIPTION: +# Ensures the server SDK is installed if a driver is being built +x-modular_server_supports_drivers_check() { + # (#135873) Only certain servers will actually use or be capable of + # building external drivers, including binary drivers. + if [[ -n "${DRIVER}" ]]; then + if has_version '>=x11-base/xorg-server-1.1'; then + if ! built_with_use x11-base/xorg-server xorg; then + eerror "x11-base/xorg-server is not built with support for external drivers." + die "You must build x11-base/xorg-server with USE=xorg." 
+ fi + fi + fi +} + +# @FUNCTION: x-modular_unpack_source +# @USAGE: +# @DESCRIPTION: +# Simply unpack source code. Nothing else. +x-modular_unpack_source() { + if [[ -n ${GIT_ECLASS} ]]; then + git_src_unpack + else + unpack ${A} + fi + cd "${S}" + + if [[ -n ${FONT_OPTIONS} ]]; then + einfo "Detected font directory: ${FONT_DIR}" + fi +} + +# @FUNCTION: x-modular_patch_source +# @USAGE: +# @DESCRIPTION: +# Apply all patches +x-modular_patch_source() { + # Use standardized names and locations with bulk patching + # Patch directory is ${WORKDIR}/patch + # See epatch() in eutils.eclass for more documentation + if [[ -z "${EPATCH_SUFFIX}" ]] ; then + EPATCH_SUFFIX="patch" + fi + +# @VARIABLE: PATCHES +# @DESCRIPTION: +# If you have any patches to apply, set PATCHES to their locations and epatch +# will apply them. It also handles epatch-style bulk patches, if you know how to +# use them and set the correct variables. If you don't, read eutils.eclass. + if [[ ${#PATCHES[@]} -gt 1 ]]; then + for x in "${PATCHES[@]}"; do + epatch "${x}" + done + elif [[ -n "${PATCHES}" ]]; then + for x in ${PATCHES}; do + epatch "${x}" + done + # For non-default directory bulk patching + elif [[ -n "${PATCH_LOC}" ]] ; then + epatch ${PATCH_LOC} + # For standard bulk patching + elif [[ -d "${EPATCH_SOURCE}" ]] ; then + epatch + fi +} + +# @FUNCTION: x-modular_reconf_source +# @USAGE: +# @DESCRIPTION: +# Run eautoreconf if necessary, and run elibtoolize. +x-modular_reconf_source() { + if [[ "${SNAPSHOT}" = "yes" ]] + then + # If possible, generate configure if it doesn't exist + if [ -f "./configure.ac" ] + then + eautoreconf + fi + fi + + # Joshua Baergen - October 23, 2005 + # Fix shared lib issues on MIPS, FBSD, etc etc + elibtoolize +} + +# @FUNCTION: x-modular_src_prepare +# @USAGE: +# @DESCRIPTION: +# Prepare a package after unpacking, performing all X-related tasks. 
+x-modular_src_prepare() { + [[ -n ${GIT_ECLASS} ]] && has src_prepare ${EXPORTED_FUNCTIONS} \ + && git_src_prepare + x-modular_patch_source + x-modular_reconf_source +} + +# @FUNCTION: x-modular_src_unpack +# @USAGE: +# @DESCRIPTION: +# Unpack a package, performing all X-related tasks. +x-modular_src_unpack() { + x-modular_specs_check + x-modular_server_supports_drivers_check + x-modular_dri_check + x-modular_unpack_source + has src_prepare ${EXPORTED_FUNCTIONS} || x-modular_src_prepare +} + +# @FUNCTION: x-modular_font_configure +# @USAGE: +# @DESCRIPTION: +# If a font package, perform any necessary configuration steps +x-modular_font_configure() { + if [[ -n "${FONT}" ]]; then + # Might be worth adding an option to configure your desired font + # and exclude all others. Also, should this USE be nls or minimal? + if has nls ${IUSE//+} && ! use nls; then + FONT_OPTIONS="${FONT_OPTIONS} + --disable-iso8859-2 + --disable-iso8859-3 + --disable-iso8859-4 + --disable-iso8859-5 + --disable-iso8859-6 + --disable-iso8859-7 + --disable-iso8859-8 + --disable-iso8859-9 + --disable-iso8859-10 + --disable-iso8859-11 + --disable-iso8859-12 + --disable-iso8859-13 + --disable-iso8859-14 + --disable-iso8859-15 + --disable-iso8859-16 + --disable-jisx0201 + --disable-koi8-r" + fi + fi +} + +# @FUNCTION: x-modular_debug_setup +# @USAGE: +# @DESCRIPTION: +# Set up CFLAGS for a debug build +x-modular_debug_setup() { + if [[ -n "${DEBUGGABLE}" ]]; then + if use debug; then + strip-flags + append-flags -g + fi + fi +} + +# @FUNCTION: x-modular_src_configure +# @USAGE: +# @DESCRIPTION: +# Perform any necessary pre-configuration steps, then run configure +x-modular_src_configure() { + x-modular_font_configure + x-modular_debug_setup + +# @VARIABLE: CONFIGURE_OPTIONS +# @DESCRIPTION: +# Any extra options to pass to configure + + # If prefix isn't set here, .pc files cause problems + if [[ -x ${ECONF_SOURCE:-.}/configure ]]; then + econf --prefix=${XDIR} \ + --datadir=${XDIR}/share \ + 
${FONT_OPTIONS} \ + ${DRIVER_OPTIONS} \ + ${CONFIGURE_OPTIONS} + fi +} + +# @FUNCTION: x-modular_src_make +# @USAGE: +# @DESCRIPTION: +# Run make. +x-modular_src_make() { + emake || die "emake failed" +} + +# @FUNCTION: x-modular_src_compile +# @USAGE: +# @DESCRIPTION: +# Compile a package, performing all X-related tasks. +x-modular_src_compile() { + has src_configure ${EXPORTED_FUNCTIONS} || x-modular_src_configure + x-modular_src_make +} + +# @FUNCTION: x-modular_src_install +# @USAGE: +# @DESCRIPTION: +# Install a built package to ${D}, performing any necessary steps. +# Creates a ChangeLog from git if using live ebuilds. +x-modular_src_install() { + # Install everything to ${XDIR} + if [[ ${CATEGORY} = x11-proto ]]; then + make \ + ${PN/proto/}docdir=/usr/share/doc/${PF} \ + DESTDIR="${D}" \ + install \ + || die + else + make \ + docdir=/usr/share/doc/${PF} \ + DESTDIR="${D}" \ + install \ + || die + fi +# Shouldn't be necessary in XDIR=/usr +# einstall forces datadir, so we need to re-force it +# datadir=${XDIR}/share \ +# mandir=${XDIR}/share/man \ + + if [[ -n ${GIT_ECLASS} ]]; then + pushd "${EGIT_STORE_DIR}/${EGIT_CLONE_DIR}" + git log ${GIT_TREE} > "${S}"/ChangeLog + popd + fi + + if [[ -e ${S}/ChangeLog ]]; then + dodoc "${S}"/ChangeLog + fi +# @VARIABLE: DOCS +# @DESCRIPTION: +# Any documentation to install via dodoc + [[ -n ${DOCS} ]] && dodoc ${DOCS} + + # Don't install libtool archives for server modules + if [[ -e ${D}/usr/$(get_libdir)/xorg/modules ]]; then + find "${D}"/usr/$(get_libdir)/xorg/modules -name '*.la' \ + | xargs rm -f + fi + + if [[ -n "${FONT}" ]]; then + remove_font_metadata + fi + + if [[ -n "${DRIVER}" ]]; then + install_driver_hwdata + fi +} + +# @FUNCTION: x-modular_pkg_preinst +# @USAGE: +# @DESCRIPTION: +# This function doesn't do anything right now, but it may in the future. 
+x-modular_pkg_preinst() { + # We no longer do anything here, but we can't remove it from the API + : +} + +# @FUNCTION: x-modular_pkg_postinst +# @USAGE: +# @DESCRIPTION: +# Run X-specific post-installation tasks on the live filesystem. The +# only task right now is some setup for font packages. +x-modular_pkg_postinst() { + if [[ -n "${FONT}" ]]; then + setup_fonts + fi +} + +# @FUNCTION: x-modular_pkg_postrm +# @USAGE: +# @DESCRIPTION: +# Run X-specific post-removal tasks on the live filesystem. The only +# task right now is some cleanup for font packages. +x-modular_pkg_postrm() { + if [[ -n "${FONT}" ]]; then + font_pkg_postrm + fi +} + +# @FUNCTION: setup_fonts +# @USAGE: +# @DESCRIPTION: +# Generates needed files for fonts and fixes font permissions +setup_fonts() { + if [[ ! -n "${FONT_DIR}" ]]; then + msg="FONT_DIR is empty. The ebuild should set it to at least one subdir of /usr/share/fonts." + eerror "${msg}" + die "${msg}" + fi + + create_fonts_scale + create_fonts_dir + create_font_cache +} + +# @FUNCTION: remove_font_metadata +# @USAGE: +# @DESCRIPTION: +# Don't let the package install generated font files that may overlap +# with other packages. Instead, they're generated in pkg_postinst(). +remove_font_metadata() { + local DIR + for DIR in ${FONT_DIR}; do + if [[ "${DIR}" != "Speedo" ]] && \ + [[ "${DIR}" != "CID" ]] ; then + # Delete font metadata files + # fonts.scale, fonts.dir, fonts.cache-1 + rm -f "${D}"/usr/share/fonts/${DIR}/fonts.{scale,dir,cache-1} + fi + done +} + +# @FUNCTION: install_driver_hwdata +# @USAGE: +# @DESCRIPTION: +# Installs device-to-driver mappings for system-config-display and +# anything else that uses hwdata. +install_driver_hwdata() { + insinto /usr/share/hwdata/videoaliases + for i in "${FILESDIR}"/*.xinf; do + # We need this for the case when none exist, + # so *.xinf doesn't expand + if [[ -e $i ]]; then + doins $i + fi + done +} + +# @FUNCTION: discover_font_dirs +# @USAGE: +# @DESCRIPTION: +# Deprecated. 
Sets up the now-unused FONT_DIRS variable. +discover_font_dirs() { + FONT_DIRS="${FONT_DIR}" +} + +# @FUNCTION: create_fonts_scale +# @USAGE: +# @DESCRIPTION: +# Create fonts.scale file, used by the old server-side fonts subsystem. +create_fonts_scale() { + ebegin "Creating fonts.scale files" + local x + for DIR in ${FONT_DIR}; do + x=${ROOT}/usr/share/fonts/${DIR} + [[ -z "$(ls ${x}/)" ]] && continue + [[ "$(ls ${x}/)" = "fonts.cache-1" ]] && continue + + # Only generate .scale files if truetype, opentype or type1 + # fonts are present ... + + # NOTE: There is no way to regenerate Speedo/CID fonts.scale + # 2 August 2004 + if [[ "${x/encodings}" = "${x}" ]] \ + && [[ -n "$(find ${x} -iname '*.[pot][ft][abcf]' -print)" ]]; then + mkfontscale \ + -a "${ROOT}"/usr/share/fonts/encodings/encodings.dir \ + -- ${x} + fi + done + eend 0 +} + +# @FUNCTION: create_fonts_dir +# @USAGE: +# @DESCRIPTION: +# Create fonts.dir file, used by the old server-side fonts subsystem. +create_fonts_dir() { + ebegin "Generating fonts.dir files" + for DIR in ${FONT_DIR}; do + x=${ROOT}/usr/share/fonts/${DIR} + [[ -z "$(ls ${x}/)" ]] && continue + [[ "$(ls ${x}/)" = "fonts.cache-1" ]] && continue + + if [[ "${x/encodings}" = "${x}" ]]; then + mkfontdir \ + -e "${ROOT}"/usr/share/fonts/encodings \ + -e "${ROOT}"/usr/share/fonts/encodings/large \ + -- ${x} + fi + done + eend 0 +} + +# @FUNCTION: create_font_cache +# @USAGE: +# @DESCRIPTION: +# Create fonts.cache-1 files, used by the new client-side fonts +# subsystem. 
+create_font_cache() { + font_pkg_postinst +} diff --git a/eclass/xorg-2.eclass b/eclass/xorg-2.eclass new file mode 100644 index 0000000..33281e5 --- /dev/null +++ b/eclass/xorg-2.eclass @@ -0,0 +1,413 @@ +# Copyright 1999-2010 Gentoo Foundation +# Distributed under the terms of the GNU General Public License v2 +# $Header: /var/cvsroot/gentoo-x86/eclass/xorg-2.eclass,v 1.6 2010/07/14 08:34:27 scarabeus Exp $ +# +# @ECLASS: xorg-2.eclass +# @MAINTAINER: +# x11@gentoo.org +# @BLURB: Reduces code duplication in the modularized X11 ebuilds. +# @DESCRIPTION: +# This eclass makes trivial X ebuilds possible for apps, fonts, drivers, +# and more. Many things that would normally be done in various functions +# can be accessed by setting variables instead, such as patching, +# running eautoreconf, passing options to configure and installing docs. +# +# All you need to do in a basic ebuild is inherit this eclass and set +# DESCRIPTION, KEYWORDS and RDEPEND/DEPEND. If your package is hosted +# with the other X packages, you don't need to set SRC_URI. Pretty much +# everything else should be automatic. 
+ +# Author: Tomáš Chvátal +# Author: Donnie Berkholz + +MULTILIB_EXT_SOURCE_BUILD=yes + +GIT_ECLASS="" +if [[ ${PV} == *9999* ]]; then + GIT_ECLASS="git" + XORG_EAUTORECONF="yes" + SRC_URI="" +fi + +# If we're a font package, but not the font.alias one +FONT_ECLASS="" +if [[ ${PN} == font* \ + && ${CATEGORY} = media-fonts \ + && ${PN} != font-alias \ + && ${PN} != font-util ]]; then + # Activate font code in the rest of the eclass + FONT="yes" + FONT_ECLASS="font" +fi + +inherit eutils base libtool multilib toolchain-funcs flag-o-matic autotools \ + ${FONT_ECLASS} ${GIT_ECLASS} + +EXPORTED_FUNCTIONS="src_unpack src_compile src_install pkg_postinst pkg_postrm" +case "${EAPI:-0}" in + 3) EXPORTED_FUNCTIONS="${EXPORTED_FUNCTIONS} src_prepare src_configure" ;; + *) DEPEND="EAPI-UNSUPPORTED" ;; +esac + +# exports must be ALWAYS after inherit +EXPORT_FUNCTIONS ${EXPORTED_FUNCTIONS} + +IUSE="" +HOMEPAGE="http://xorg.freedesktop.org/" + +# @ECLASS-VARIABLE: XORG_EAUTORECONF +# @DESCRIPTION: +# If set to 'yes' and configure.ac exists, eautoreconf will run. Set +# before inheriting this eclass. +: ${XORG_EAUTORECONF:="no"} + +# Set up SRC_URI for individual modular releases +BASE_INDIVIDUAL_URI="http://xorg.freedesktop.org/releases/individual" +# @ECLASS-VARIABLE: MODULE +# @DESCRIPTION: +# The subdirectory to download source from. Possible settings are app, +# doc, data, util, driver, font, lib, proto, xserver. Set above the +# inherit to override the default autoconfigured module. 
+if [[ -z ${MODULE} ]]; then + MODULE="" + case ${CATEGORY} in + app-doc) MODULE="doc" ;; + media-fonts) MODULE="font" ;; + x11-apps|x11-wm) MODULE="app" ;; + x11-misc|x11-themes) MODULE="util" ;; + x11-drivers) MODULE="driver" ;; + x11-base) MODULE="xserver" ;; + x11-proto) MODULE="proto" ;; + x11-libs) MODULE="lib" ;; + esac +fi + +if [[ -n ${GIT_ECLASS} ]]; then + EGIT_REPO_URI="git://anongit.freedesktop.org/git/xorg/${MODULE}/${PN}" +else + SRC_URI+=" ${BASE_INDIVIDUAL_URI}/${MODULE}/${P}.tar.bz2" +fi + +: ${SLOT:=0} + +# Set the license for the package. This can be overridden by setting +# LICENSE after the inherit. Nearly all FreeDesktop-hosted X packages +# are under the MIT license. (This is what Red Hat does in their rpms) +: ${LICENSE:=MIT} + +# Set up shared dependencies +if [[ ${XORG_EAUTORECONF} != no ]]; then + DEPEND+=" + >=sys-devel/libtool-2.2.6a + sys-devel/m4" + # This MUST BE STABLE + if [[ ${PN} != util-macros ]] ; then + DEPEND+=" >=x11-misc/util-macros-1.8.0" + # Required even by xorg-server + [[ ${PN} == "font-util" ]] || DEPEND+=" >=media-fonts/font-util-1.1.1-r1" + fi + WANT_AUTOCONF="latest" + WANT_AUTOMAKE="latest" +fi + +if [[ ${FONT} == yes ]]; then + RDEPEND+=" media-fonts/encodings + x11-apps/mkfontscale + x11-apps/mkfontdir" + PDEPEND+=" media-fonts/font-alias" + + # @ECLASS-VARIABLE: FONT_DIR + # @DESCRIPTION: + # If you're creating a font package and the suffix of PN is not equal to + # the subdirectory of /usr/share/fonts/ it should install into, set + # FONT_DIR to that directory or directories. Set before inheriting this + # eclass. 
+ [[ -z ${FONT_DIR} ]] && FONT_DIR=${PN##*-} + + # Fix case of font directories + FONT_DIR=${FONT_DIR/ttf/TTF} + FONT_DIR=${FONT_DIR/otf/OTF} + FONT_DIR=${FONT_DIR/type1/Type1} + FONT_DIR=${FONT_DIR/speedo/Speedo} + + # Set up configure options, wrapped so ebuilds can override if need be + [[ -z ${FONT_OPTIONS} ]] && FONT_OPTIONS="--with-fontdir=\"${EPREFIX}/usr/share/fonts/${FONT_DIR}\"" + + [[ ${PN##*-} = misc || ${PN##*-} = 75dpi || ${PN##*-} = 100dpi || ${PN##*-} = cyrillic ]] && IUSE+=" nls" +fi + +# If we're a driver package, then enable DRIVER case +[[ ${PN} == xf86-video-* || ${PN} == xf86-input-* ]] && DRIVER="yes" + +# @ECLASS-VARIABLE: XORG_STATIC +# @DESCRIPTION: +# Enables static-libs useflag. Set to no, if your package gets: +# +# QA: configure: WARNING: unrecognized options: --disable-static +: ${XORG_STATIC:="yes"} + +# Add static-libs useflag where useful. +if [[ ${XORG_STATIC} == yes \ + && ${FONT} != yes \ + && ${CATEGORY} != app-doc \ + && ${CATEGORY} != x11-proto \ + && ${CATEGORY} != x11-drivers \ + && ${CATEGORY} != media-fonts \ + && ${PN} != util-macros \ + && ${PN} != xbitmaps \ + && ${PN} != xorg-cf-files \ + && ${PN/xcursor} = ${PN} ]]; then + IUSE+=" static-libs" +fi + +DEPEND+=" >=dev-util/pkgconfig-0.23" + +# Check deps on xorg-server +has dri ${IUSE//+} && DEPEND+=" dri? ( >=x11-base/xorg-server-1.6.3.901-r2[-minimal] )" +[[ -n "${DRIVER}" ]] && DEPEND+=" x11-base/xorg-server[xorg]" + +# @FUNCTION: xorg-2_pkg_setup +# @USAGE: +# @DESCRIPTION: +# Setup prefix compat +xorg-2_pkg_setup() { + [[ ${FONT} == yes ]] && font_pkg_setup +} + +# @FUNCTION: xorg-2_src_unpack +# @USAGE: +# @DESCRIPTION: +# Simply unpack source code. 
+xorg-2_src_unpack() {
+	# Live ebuilds fetch from git; everything else unpacks the distfiles.
+	if [[ -n ${GIT_ECLASS} ]]; then
+		git_src_unpack
+	else
+		unpack ${A}
+	fi
+
+	[[ -n ${FONT_OPTIONS} ]] && einfo "Detected font directory: ${FONT_DIR}"
+}
+
+# @FUNCTION: xorg-2_patch_source
+# @USAGE:
+# @DESCRIPTION:
+# Apply all patches
+xorg-2_patch_source() {
+	# Use standardized names and locations with bulk patching
+	# Patch directory is ${WORKDIR}/patch
+	# See epatch() in eutils.eclass for more documentation
+	EPATCH_SUFFIX=${EPATCH_SUFFIX:=patch}
+
+	# Bulk-apply the patch directory if present, then any PATCHES array
+	# handled by base_src_prepare (base.eclass).
+	[[ -d "${EPATCH_SOURCE}" ]] && epatch
+	base_src_prepare
+}
+
+# @FUNCTION: xorg-2_reconf_source
+# @USAGE:
+# @DESCRIPTION:
+# Run eautoreconf if necessary, and run elibtoolize.
+xorg-2_reconf_source() {
+	case ${CHOST} in
+		*-interix* | *-aix* | *-winnt*)
+			# some hosts need full eautoreconf
+			[[ -e "./configure.ac" || -e "./configure.in" ]] && eautoreconf || ewarn "Unable to autoreconf the configure script. Things may fail."
+			;;
+		*)
+			# elibtoolize required for BSD
+			# NOTE: `cond && eautoreconf || elibtoolize` also runs elibtoolize
+			# if eautoreconf itself returns non-zero — presumably intentional.
+			[[ ${XORG_EAUTORECONF} != no && -e "./configure.ac" ]] && eautoreconf || elibtoolize
+			;;
+	esac
+}
+
+# @FUNCTION: xorg-2_src_prepare
+# @USAGE:
+# @DESCRIPTION:
+# Prepare a package after unpacking, performing all X-related tasks.
+xorg-2_src_prepare() {
+	[[ -n ${GIT_ECLASS} ]] && git_src_prepare
+	xorg-2_patch_source
+	xorg-2_reconf_source
+}
+
+# @FUNCTION: xorg-2_font_configure
+# @USAGE:
+# @DESCRIPTION:
+# If a font package, perform any necessary configuration steps
+xorg-2_font_configure() {
+	# With USE=-nls, disable building of all non-default encodings.
+	if has nls ${IUSE//+} && ! use nls; then
+		FONT_OPTIONS+="
+			--disable-iso8859-2
+			--disable-iso8859-3
+			--disable-iso8859-4
+			--disable-iso8859-5
+			--disable-iso8859-6
+			--disable-iso8859-7
+			--disable-iso8859-8
+			--disable-iso8859-9
+			--disable-iso8859-10
+			--disable-iso8859-11
+			--disable-iso8859-12
+			--disable-iso8859-13
+			--disable-iso8859-14
+			--disable-iso8859-15
+			--disable-iso8859-16
+			--disable-jisx0201
+			--disable-koi8-r"
+	fi
+}
+
+# @FUNCTION: xorg-2_flags_setup
+# @USAGE:
+# @DESCRIPTION:
+# Set up CFLAGS for a debug build
+xorg-2_flags_setup() {
+	# Win32 require special define
+	[[ ${CHOST} == *-winnt* ]] && append-cppflags -DWIN32 -D__STDC__
+	# hardened ldflags
+	[[ ${PN} = xorg-server || -n ${DRIVER} ]] && append-ldflags -Wl,-z,lazy
+}
+
+# @FUNCTION: xorg-2_src_configure
+# @USAGE:
+# @DESCRIPTION:
+# Perform any necessary pre-configuration steps, then run configure
+xorg-2_src_configure() {
+	local myopts=""
+
+	xorg-2_flags_setup
+	[[ -n "${FONT}" ]] && xorg-2_font_configure
+
+# @VARIABLE: CONFIGURE_OPTIONS
+# @DESCRIPTION:
+# Any options to pass to configure
+	CONFIGURE_OPTIONS=${CONFIGURE_OPTIONS:=""}
+	# Data-only packages (e.g. some protos) ship no configure script;
+	# in that case there is nothing to do here.
+	if [[ -x ${ECONF_SOURCE:-.}/configure ]]; then
+		if has static-libs ${IUSE//+}; then
+			myopts+=" $(use_enable static-libs static)"
+		fi
+		econf \
+			${FONT_OPTIONS} \
+			${CONFIGURE_OPTIONS} \
+			${myopts}
+	fi
+}
+
+# @FUNCTION: xorg-2_src_compile
+# @USAGE:
+# @DESCRIPTION:
+# Compile a package, performing all X-related tasks.
+xorg-2_src_compile() {
+	base_src_compile
+}
+
+# @FUNCTION: xorg-2_src_install
+# @USAGE:
+# @DESCRIPTION:
+# Install a built package to ${D}, performing any necessary steps.
+# Creates a ChangeLog from git if using live ebuilds.
+xorg-2_src_install() {
+	# x11-proto packages use a namespaced docdir variable
+	# (e.g. xextprotodocdir), derived here by stripping "proto" from PN.
+	if [[ ${CATEGORY} == x11-proto ]]; then
+		emake \
+			${PN/proto/}docdir=${EPREFIX}/usr/share/doc/${PF} \
+			DESTDIR="${D}" \
+			install || die "emake install failed"
+	else
+		emake \
+			docdir=${EPREFIX}/usr/share/doc/${PF} \
+			DESTDIR="${D}" \
+			install || die "emake install failed"
+	fi
+
+	# For live ebuilds, generate a ChangeLog from the git history.
+	if [[ -n ${GIT_ECLASS} ]]; then
+		pushd "${EGIT_STORE_DIR}/${EGIT_CLONE_DIR}" > /dev/null
+		git log ${GIT_TREE} > "${S}"/ChangeLog
+		popd > /dev/null
+	fi
+
+	if [[ -e "${S}"/ChangeLog ]]; then
+		dodoc "${S}"/ChangeLog
+	fi
+# @VARIABLE: DOCS
+# @DESCRIPTION:
+# Any documentation to install
+	if [[ -n ${DOCS} ]]; then
+		dodoc ${DOCS} || die "dodoc failed"
+	fi
+
+	# Don't install libtool archives for server modules
+	if [[ -e "${D%/}${EPREFIX}/usr/$(get_libdir)/xorg/modules" ]]; then
+		find "${D%/}${EPREFIX}/usr/$(get_libdir)/xorg/modules" -name '*.la' \
+			-exec rm -f {} ';'
+	fi
+
+	[[ -n ${FONT} ]] && remove_font_metadata
+}
+
+# @FUNCTION: xorg-2_pkg_postinst
+# @USAGE:
+# @DESCRIPTION:
+# Run X-specific post-installation tasks on the live filesystem. The
+# only task right now is some setup for font packages.
+xorg-2_pkg_postinst() {
+	[[ -n ${FONT} ]] && setup_fonts
+}
+
+# @FUNCTION: xorg-2_pkg_postrm
+# @USAGE:
+# @DESCRIPTION:
+# Run X-specific post-removal tasks on the live filesystem. The only
+# task right now is some cleanup for font packages.
+xorg-2_pkg_postrm() {
+	if [[ -n ${FONT} ]]; then
+		font_pkg_postrm
+	fi
+}
+
+# @FUNCTION: setup_fonts
+# @USAGE:
+# @DESCRIPTION:
+# Generates needed files for fonts and fixes font permissions
+setup_fonts() {
+	# Regenerate index files on the live filesystem, then let the font
+	# eclass finish up (cache updates, permissions).
+	create_fonts_scale
+	create_fonts_dir
+	font_pkg_postinst
+}
+
+# @FUNCTION: remove_font_metadata
+# @USAGE:
+# @DESCRIPTION:
+# Don't let the package install generated font files that may overlap
+# with other packages. Instead, they're generated in pkg_postinst().
+remove_font_metadata() {
+	# Speedo and CID fonts are not indexed by mkfontscale/mkfontdir, so
+	# their shipped metadata must be kept; everything else is regenerated
+	# in pkg_postinst() and would collide across font packages.
+	if [[ ${FONT_DIR} != Speedo && ${FONT_DIR} != CID ]]; then
+		einfo "Removing font metadata"
+		rm -rf "${ED}"/usr/share/fonts/${FONT_DIR}/fonts.{scale,dir,cache-1}
+	fi
+}
+
+# @FUNCTION: create_fonts_scale
+# @USAGE:
+# @DESCRIPTION:
+# Create fonts.scale file, used by the old server-side fonts subsystem.
+create_fonts_scale() {
+	if [[ ${FONT_DIR} != Speedo && ${FONT_DIR} != CID ]]; then
+		# Fixed message: mkfontscale writes "fonts.scale", not "font.scale"
+		# (keeps the wording consistent with create_fonts_dir below).
+		ebegin "Generating fonts.scale"
+		mkfontscale \
+			-a "${EROOT}/usr/share/fonts/encodings/encodings.dir" \
+			-- "${EROOT}/usr/share/fonts/${FONT_DIR}"
+		eend $?
+	fi
+}
+
+# @FUNCTION: create_fonts_dir
+# @USAGE:
+# @DESCRIPTION:
+# Create fonts.dir file, used by the old server-side fonts subsystem.
+create_fonts_dir() {
+	ebegin "Generating fonts.dir"
+	mkfontdir \
+		-e "${EROOT}"/usr/share/fonts/encodings \
+		-e "${EROOT}"/usr/share/fonts/encodings/large \
+		-- "${EROOT}/usr/share/fonts/${FONT_DIR}"
+	eend $?
+}